4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
@@ -29,14 +29,14 @@ jobs:
const title = context.payload.pull_request.title;
const labels = context.payload.pull_request.labels.map(l => l.name);
if (labels.includes('dev')) {
-const regex = /^(?!feat|fix|refactor|doc|perf|style|test|chore|revert)[a-z].*$/;
+const regex = /^(?!feat|fix|refactor|docs|perf|style|test|chore|revert)[a-z].*$/;
if (!regex.test(title)) {
core.setFailed(
`PR title "${title}" does not match the commit format for non-user-facing changes`
);
}
} else {
-const regex = /^(feat|fix|refactor|doc|perf|style|test|chore|revert)!?:[ ][a-z].*$/;
+const regex = /^(feat|fix|refactor|docs|perf|style|test|chore|revert)!?:[ ][a-z].*$/;
if (!regex.test(title)) {
core.setFailed(
`PR title "${title}" does not match the expected conventional commit format for user-facing changes`
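The only functional change in this workflow is `doc` -> `docs`, aligning the allowed types with the conventional-commit `docs` type. A minimal sketch of the effect, written with Python's `re` rather than the workflow's JavaScript, using made-up titles:

    import re

    # User-facing PR titles must start with a known type; `docs` is now accepted.
    USER_FACING = re.compile(
        r"^(feat|fix|refactor|docs|perf|style|test|chore|revert)!?:[ ][a-z].*$"
    )

    assert USER_FACING.match("docs: clarify pagination in list_basins")
    assert not USER_FACING.match("doc: clarify pagination in list_basins")  # old spelling now fails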
44 changes: 19 additions & 25 deletions src/streamstore/_client.py
@@ -149,13 +149,13 @@ class S2:

Args:
access_token: Access token generated from `S2 dashboard <https://s2.dev/dashboard>`_.
-endpoints: S2 endpoints. If None, public endpoints for S2 service running in AWS cloud will be used.
-request_timeout: Timeout for requests made by the client. Default value is 5 seconds.
-max_retries: Maximum number of retries for a request. Default value is 3.
+endpoints: S2 endpoints. If not specified, public endpoints for the S2 service running in the AWS cloud will be used.
+request_timeout: Timeout for requests made by the client. Default value is ``5`` seconds.
+max_retries: Maximum number of retries for a request. Default value is ``3``.
enable_append_retries: Enable retries for appends, i.e., for both :meth:`.Stream.append` and
-:meth:`.Stream.append_session`. Default value is True.
+:meth:`.Stream.append_session`. Default value is ``True``.
enable_compression: Enable compression (Gzip) for :meth:`.Stream.append`, :meth:`.Stream.append_session`,
-:meth:`.Stream.read`, and :meth:`.Stream.read_session`. Default value is False.
+:meth:`.Stream.read`, and :meth:`.Stream.read_session`. Default value is ``False``.
"""

__slots__ = (
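For context, a hypothetical construction that leans on these defaults; the package-root import is an assumption, since the class lives in the private module streamstore._client:

    from streamstore import S2  # assumed re-export of streamstore._client.S2

    # Relies on the documented defaults: 5 s request timeout, 3 retries,
    # append retries enabled, compression disabled.
    s2 = S2(access_token="<access-token>")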
@@ -249,7 +249,7 @@ async def create_basin(
config: Configuration for the basin.

Note:
-**name** must be globally unique and must be between 8 and 48 characters, comprising lowercase
+``name`` must be globally unique and must be between 8 and 48 characters, comprising lowercase
letters, numbers and hyphens. It cannot begin or end with a hyphen.
"""
_validate_basin(name)
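An illustrative check mirroring the documented rule; the library's own validation is `_validate_basin`, so this regex is only a sketch of the stated constraints:

    import re

    # 8-48 chars of lowercase letters, digits, and hyphens,
    # with no leading or trailing hyphen.
    BASIN_NAME = re.compile(r"^[a-z0-9][a-z0-9-]{6,46}[a-z0-9]$")

    assert BASIN_NAME.match("my-first-basin")
    assert not BASIN_NAME.match("-basin-")  # cannot begin or end with a hyphen
    assert not BASIN_NAME.match("short")    # fewer than 8 characters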
@@ -307,18 +307,15 @@ async def list_basins(
self,
prefix: str = "",
start_after: str = "",
-limit: int | None = None,
+limit: int = 1000,
) -> schemas.Page[schemas.BasinInfo]:
"""
List basins.

Args:
-prefix: List only those that begin with this value.
-start_after: List only those that lexicographically start after this value,
-which can be the name of the last item from previous page, to continue from there.
-It must be greater than or equal to the prefix if specified.
-limit: Number of items to return in one page. Maximum number of items that can be
-returned in one page is 1000.
+prefix: Filter to basins whose name begins with this prefix.
+start_after: Filter to basins whose name starts lexicographically after this value.
+limit: Number of items to return per page, up to a maximum of 1000.
"""
request = ListBasinsRequest(prefix=prefix, start_after=start_after, limit=limit)
response = await self._retrier(
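The rewritten docstring implies cursor-style pagination: feed the last name of one page back in as start_after for the next. A sketch, assuming BasinInfo exposes a name attribute:

    async def iter_all_basins(s2, prefix=""):
        start_after = ""
        while True:
            page = await s2.list_basins(prefix=prefix, start_after=start_after)
            for basin in page.items:
                yield basin
            if not page.has_more:
                return
            start_after = page.items[-1].name  # cursor for the next page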
@@ -424,7 +421,7 @@ async def issue_access_token(
from stream names in responses.

Note:
-**id** must be unique to the account and between 1 and 96 bytes in length.
+``id`` must be unique to the account and between 1 and 96 bytes in length.
"""
request = IssueAccessTokenRequest(
info=access_token_info_message(id, scope, auto_prefix_streams, expires_at)
@@ -446,8 +443,8 @@

Args:
prefix: Filter to access tokens whose ID begins with this prefix.
-start_after: Filter to access tokens whose ID lexicographically starts after this value.
-limit: Number of results, up to a maximum of 1000.
+start_after: Filter to access tokens whose ID starts lexicographically after this value.
+limit: Number of items to return per page, up to a maximum of 1000.
"""
request = ListAccessTokensRequest(
prefix=prefix, start_after=start_after, limit=limit
@@ -536,8 +533,8 @@ async def create_stream(
config: Configuration for the stream.

Note:
-**name** must be unique within the basin. It can be an arbitrary string upto 512 characters.
-Backslash (`/`) is recommended as a delimiter for hierarchical naming.
+``name`` must be unique within the basin. It can be an arbitrary string up to 512 characters.
+Slash (``/``) is recommended as a delimiter for hierarchical naming.
"""
request = CreateStreamRequest(
stream=name,
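Hypothetical names showing the slash-delimited hierarchy the note recommends; basin stands in for however the client hands back a basin handle:

    # Unique within the basin; `/` segments give a natural hierarchy.
    await basin.create_stream("logs/api/2025-01-01")
    await basin.create_stream("logs/worker/2025-01-01")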
@@ -588,18 +585,15 @@ async def list_streams(
self,
prefix: str = "",
start_after: str = "",
-limit: int | None = None,
+limit: int = 1000,
) -> schemas.Page[schemas.StreamInfo]:
"""
List streams.

Args:
-prefix: List only those that begin with this value.
-start_after: List only those that lexicographically start after this value,
-which can be the name of the last item from previous page, to continue from there.
-It must be greater than or equal to the prefix if specified.
-limit: Number of items to return in one page. Maximum number of items that can be
-returned in one page is 1000.
+prefix: Filter to streams whose name begins with this prefix.
+start_after: Filter to streams whose name starts lexicographically after this value.
+limit: Number of items to return per page, up to a maximum of 1000.
"""
request = ListStreamsRequest(
prefix=prefix, start_after=start_after, limit=limit
2 changes: 1 addition & 1 deletion src/streamstore/schemas.py
@@ -189,7 +189,7 @@ class Page(Generic[T]):

#: List of items of any type T.
items: list[T]
-#: If True, it means that there are more pages.
+#: If ``True``, it means that there are more pages.
has_more: bool


6 changes: 3 additions & 3 deletions src/streamstore/utils.py
@@ -40,7 +40,7 @@ def trim(desired_first_seq_num: int) -> Record:
preceding records in the stream.

Note:
-If **desired_first_seq_num** was smaller than the sequence number for the first existing
+If ``desired_first_seq_num`` is smaller than the sequence number for the first existing
record in the stream, trimming doesn't happen.
"""
return Record(
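Usage sketch: trim only builds a command record, and nothing is trimmed until that record is appended to the stream it targets. The AppendInput(records=...) shape, its import location, and the stream handle are assumptions based on names elsewhere in this PR:

    from streamstore.schemas import AppendInput  # assumed location
    from streamstore.utils import trim

    # Ask the service to drop records preceding sequence number 1000.
    trim_record = trim(desired_first_seq_num=1_000)
    await stream.append(AppendInput(records=[trim_record]))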
@@ -210,8 +210,8 @@ async def append_inputs_gen(
max_linger_per_batch: Maximum duration for each batch to accumulate records before yielding.

Note:
-If **max_linger_per_batch** is ``None``, :class:`.AppendInput` will be yielded only
-when **max_records_per_batch** or **max_bytes_per_batch** is reached.
+If ``max_linger_per_batch`` is ``None``, :class:`.AppendInput` will be yielded only
+when ``max_records_per_batch`` or ``max_bytes_per_batch`` is reached.
"""
append_input_queue: Queue[AppendInput | None] = Queue()
append_input_aiter = _AppendInputAsyncIterator(append_input_queue)
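A usage sketch under the parameters documented above. The positional records argument and the duration type of max_linger_per_batch (taken here to be a timedelta) are assumptions:

    from datetime import timedelta

    from streamstore.utils import append_inputs_gen

    async def produce(records, stream):
        # Flush a batch at 100 records, 512 KiB, or 5 ms of linger,
        # whichever is reached first.
        async for append_input in append_inputs_gen(
            records,
            max_records_per_batch=100,
            max_bytes_per_batch=512 * 1024,
            max_linger_per_batch=timedelta(milliseconds=5),
        ):
            await stream.append(append_input)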