From bab68093c3039af6c08302d4abb1c16a034b7996 Mon Sep 17 00:00:00 2001 From: quettabit Date: Mon, 16 Mar 2026 20:16:00 -0600 Subject: [PATCH] wip --- pyproject.toml | 29 +- src/s2_sdk/__init__.py | 21 + src/s2_sdk/_client.py | 103 ++ src/s2_sdk/_endpoints.py | 38 + src/{streamstore => s2_sdk}/_exceptions.py | 30 +- .../_lib => s2_sdk/_generated}/__init__.py | 0 src/s2_sdk/_generated/_models.py | 508 +++++++++ .../_lib => s2_sdk/_generated}/s2/__init__.py | 0 .../_generated/s2/v1/__init__.py} | 0 src/s2_sdk/_generated/s2/v1/s2_pb2.py | 46 + src/s2_sdk/_generated/s2/v1/s2_pb2.pyi | 106 ++ src/s2_sdk/_mappers.py | 488 ++++++++ src/s2_sdk/_ops.py | 597 ++++++++++ src/{streamstore => s2_sdk}/_retrier.py | 12 + src/s2_sdk/_s2s/__init__.py | 0 src/s2_sdk/_s2s/_append_session.py | 227 ++++ src/s2_sdk/_s2s/_compression.py | 32 + src/s2_sdk/_s2s/_protocol.py | 170 +++ src/s2_sdk/_s2s/_read_session.py | 124 ++ src/s2_sdk/_validators.py | 28 + src/s2_sdk/py.typed | 0 src/s2_sdk/types.py | 365 ++++++ src/{streamstore => s2_sdk}/utils.py | 59 +- src/streamstore/__init__.py | 11 - src/streamstore/_client.py | 1004 ----------------- src/streamstore/_lib/s2/v1alpha/s2_pb2.py | 228 ---- src/streamstore/_lib/s2/v1alpha/s2_pb2.pyi | 765 ------------- .../_lib/s2/v1alpha/s2_pb2_grpc.py | 980 ---------------- src/streamstore/_mappers.py | 361 ------ src/streamstore/schemas.py | 509 --------- tests/conftest.py | 18 +- tests/test_account_ops.py | 87 +- tests/test_basin_ops.py | 38 +- tests/test_compression.py | 38 + tests/test_mappers.py | 363 ++++++ tests/test_protocol.py | 77 ++ tests/test_stream_ops.py | 83 +- tests/test_validators.py | 61 + update_protos | 8 - update_specs | 27 + uv.lock | 643 +++++++++-- 41 files changed, 4170 insertions(+), 4114 deletions(-) create mode 100644 src/s2_sdk/__init__.py create mode 100644 src/s2_sdk/_client.py create mode 100644 src/s2_sdk/_endpoints.py rename src/{streamstore => s2_sdk}/_exceptions.py (58%) rename src/{streamstore/_lib => 
s2_sdk/_generated}/__init__.py (100%) create mode 100644 src/s2_sdk/_generated/_models.py rename src/{streamstore/_lib => s2_sdk/_generated}/s2/__init__.py (100%) rename src/{streamstore/py.typed => s2_sdk/_generated/s2/v1/__init__.py} (100%) create mode 100644 src/s2_sdk/_generated/s2/v1/s2_pb2.py create mode 100644 src/s2_sdk/_generated/s2/v1/s2_pb2.pyi create mode 100644 src/s2_sdk/_mappers.py create mode 100644 src/s2_sdk/_ops.py rename src/{streamstore => s2_sdk}/_retrier.py (79%) create mode 100644 src/s2_sdk/_s2s/__init__.py create mode 100644 src/s2_sdk/_s2s/_append_session.py create mode 100644 src/s2_sdk/_s2s/_compression.py create mode 100644 src/s2_sdk/_s2s/_protocol.py create mode 100644 src/s2_sdk/_s2s/_read_session.py create mode 100644 src/s2_sdk/_validators.py create mode 100644 src/s2_sdk/py.typed create mode 100644 src/s2_sdk/types.py rename src/{streamstore => s2_sdk}/utils.py (75%) delete mode 100644 src/streamstore/__init__.py delete mode 100644 src/streamstore/_client.py delete mode 100644 src/streamstore/_lib/s2/v1alpha/s2_pb2.py delete mode 100644 src/streamstore/_lib/s2/v1alpha/s2_pb2.pyi delete mode 100644 src/streamstore/_lib/s2/v1alpha/s2_pb2_grpc.py delete mode 100644 src/streamstore/_mappers.py delete mode 100644 src/streamstore/schemas.py create mode 100644 tests/test_compression.py create mode 100644 tests/test_mappers.py create mode 100644 tests/test_protocol.py create mode 100644 tests/test_validators.py delete mode 100755 update_protos create mode 100755 update_specs diff --git a/pyproject.toml b/pyproject.toml index 21c4719..9706f4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,21 +1,28 @@ [project] -name = "streamstore" -version = "5.0.0" +name = "s2-sdk" +version = "0.1.0" description = "Python SDK for s2.dev" readme = "README.md" license = "MIT" license-files = ["LICENSE"] requires-python = ">=3.11" dependencies = [ - "grpcio-tools>=1.69.0", - "grpcio>=1.69.0", - "types-protobuf>=5.29.1.20241207", - 
"grpc-stubs>=1.53.0.5", + "httpx[http2]>=0.28.0", + "protobuf>=5.29.0", + "pydantic>=2.0", "anyio>=4.8.0", + "zstandard>=0.23.0", ] [dependency-groups] -dev = ["mypy>=1.14.1", "poethepoet>=0.36.0", "ruff>=0.9.1"] +dev = [ + "datamodel-code-generator>=0.28.0", + "grpcio-tools>=1.69.0", + "mypy>=1.14.1", + "poethepoet>=0.36.0", + "ruff>=0.9.1", + "types-protobuf>=5.29.1.20241207", +] test = [ "pytest>=8.0.0", "pytest-asyncio>=0.23.0", @@ -34,8 +41,12 @@ docs = [ requires = ["hatchling"] build-backend = "hatchling.build" +[tool.hatch.build.targets.wheel] +packages = ["src/s2_sdk"] + [tool.mypy] -files = ["src/", "tests/", "examples/"] +files = ["src/s2_sdk/", "tests/"] +exclude = ["src/s2_sdk/_generated/"] [tool.ruff] exclude = [ @@ -59,7 +70,7 @@ ci_linter = "uv run ruff check" ci_formatter = "uv run ruff format --check" checker = ["linter", "formatter", "type_checker"] ci_checker = ["ci_linter", "ci_formatter", "type_checker"] -e2e_tests = "uv run pytest tests/ -v -s" +e2e_tests = "uv run pytest tests/ -v -s -m 'account or basin or stream'" e2e_account_tests = "uv run pytest tests/ -v -s -m account" e2e_basin_tests = "uv run pytest tests/ -v -s -m basin" e2e_stream_tests = "uv run pytest tests/ -v -s -m stream" diff --git a/src/s2_sdk/__init__.py b/src/s2_sdk/__init__.py new file mode 100644 index 0000000..5de23ce --- /dev/null +++ b/src/s2_sdk/__init__.py @@ -0,0 +1,21 @@ +__all__ = [ + "S2", + "S2Basin", + "S2Stream", + "S2Error", + "S2ApiError", + "AppendConditionFailed", + "S2SessionError", + "Endpoints", + "s2_sdk.types", + "s2_sdk.utils", +] + +from s2_sdk._endpoints import Endpoints +from s2_sdk._exceptions import ( + AppendConditionFailed, + S2ApiError, + S2Error, + S2SessionError, +) +from s2_sdk._ops import S2, S2Basin, S2Stream diff --git a/src/s2_sdk/_client.py b/src/s2_sdk/_client.py new file mode 100644 index 0000000..d26cc2a --- /dev/null +++ b/src/s2_sdk/_client.py @@ -0,0 +1,103 @@ +from importlib.metadata import version +from typing import Any + 
+import httpx + +from s2_sdk._exceptions import AppendConditionFailed, S2ApiError + +_VERSION = version("s2-sdk") +_USER_AGENT = f"s2-sdk-python/{_VERSION}" + + +class HttpClient: + __slots__ = ("_client",) + + def __init__( + self, + base_url: str, + access_token: str, + timeout: float, + http2: bool = True, + ) -> None: + self._client = httpx.AsyncClient( + base_url=base_url, + headers={ + "authorization": f"Bearer {access_token}", + "user-agent": _USER_AGENT, + }, + timeout=timeout, + http2=http2, + ) + + async def request( + self, + method: str, + path: str, + *, + json: Any = None, + params: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + content: bytes | None = None, + ) -> httpx.Response: + response = await self._client.request( + method, + path, + json=json, + params=params, + headers=headers, + content=content, + ) + _raise_for_status(response) + return response + + def stream( + self, + method: str, + path: str, + *, + params: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + ): + return self._client.stream( + method, + path, + params=params, + headers=headers, + ) + + async def close(self) -> None: + await self._client.aclose() + + +def _raise_for_status(response: httpx.Response) -> None: + status = response.status_code + if 200 <= status < 300: + return + + if status == 412: + body = response.json() + if "fencing_token_mismatch" in body: + raise AppendConditionFailed( + f"Fencing token mismatch: {body['fencing_token_mismatch']}", + status_code=status, + ) + elif "seq_num_mismatch" in body: + raise AppendConditionFailed( + f"Sequence number mismatch: {body['seq_num_mismatch']}", + status_code=status, + ) + raise AppendConditionFailed(str(body), status_code=status) + + if status == 416: + # Tail response — not an error, handled by callers + return + + try: + body = response.json() + message = body.get("message", response.text) + code = body.get("code") + except Exception: + message = response.text + code = 
None + + raise S2ApiError(message, status_code=status, code=code) diff --git a/src/s2_sdk/_endpoints.py b/src/s2_sdk/_endpoints.py new file mode 100644 index 0000000..d7e10f7 --- /dev/null +++ b/src/s2_sdk/_endpoints.py @@ -0,0 +1,38 @@ +import os + +from s2_sdk._exceptions import fallible + + +class Endpoints: + """S2 endpoints.""" + + __slots__ = ("_account_url", "_basin_base_url") + + _account_url: str + _basin_base_url: str + + def __init__(self, account_url: str, basin_base_url: str): + self._account_url = account_url + self._basin_base_url = basin_base_url + + @classmethod + def default(cls) -> "Endpoints": + return cls( + account_url="https://aws.s2.dev/v1", + basin_base_url="https://{basin}.b.aws.s2.dev/v1", + ) + + @classmethod + @fallible + def from_env(cls) -> "Endpoints": + account_url = os.getenv("S2_ACCOUNT_ENDPOINT") + basin_url = os.getenv("S2_BASIN_ENDPOINT") + if account_url and basin_url and "{basin}" in basin_url: + return cls(account_url=account_url, basin_base_url=basin_url) + raise ValueError("Invalid S2_ACCOUNT_ENDPOINT and/or S2_BASIN_ENDPOINT") + + def account(self) -> str: + return self._account_url + + def basin(self, basin_name: str) -> str: + return self._basin_base_url.format(basin=basin_name) diff --git a/src/streamstore/_exceptions.py b/src/s2_sdk/_exceptions.py similarity index 58% rename from src/streamstore/_exceptions.py rename to src/s2_sdk/_exceptions.py index 76d6802..f10a65f 100644 --- a/src/streamstore/_exceptions.py +++ b/src/s2_sdk/_exceptions.py @@ -3,12 +3,34 @@ class S2Error(Exception): - """ - Base class for all S2 related exceptions. 
- """ + """Base class for all S2 related exceptions.""" -S2Error.__module__ = "streamstore" +class S2ApiError(S2Error): + """Error from the S2 API.""" + + def __init__(self, message: str, status_code: int, code: str | None = None): + self.status_code = status_code + self.code = code + super().__init__(message) + + +class AppendConditionFailed(S2ApiError): + """Append condition (fencing token or seq num match) was not met.""" + + +class S2SessionError(S2Error): + """Error from an S2S session.""" + + def __init__(self, message: str, status_code: int): + self.status_code = status_code + super().__init__(message) + + +S2Error.__module__ = "s2_sdk" +S2ApiError.__module__ = "s2_sdk" +AppendConditionFailed.__module__ = "s2_sdk" +S2SessionError.__module__ = "s2_sdk" def fallible(f): diff --git a/src/streamstore/_lib/__init__.py b/src/s2_sdk/_generated/__init__.py similarity index 100% rename from src/streamstore/_lib/__init__.py rename to src/s2_sdk/_generated/__init__.py diff --git a/src/s2_sdk/_generated/_models.py b/src/s2_sdk/_generated/_models.py new file mode 100644 index 0000000..d517930 --- /dev/null +++ b/src/s2_sdk/_generated/_models.py @@ -0,0 +1,508 @@ +# generated by datamodel-codegen: +# filename: openapi.json +# timestamp: 2026-03-16T22:27:04+00:00 + +from __future__ import annotations + +from datetime import time +from enum import StrEnum +from typing import Any + +from pydantic import BaseModel, Field, RootModel, conint, constr + + +class AccountMetricSet(StrEnum): + active_basins = "active-basins" + account_ops = "account-ops" + + +class AppendConditionFailed1(BaseModel): + fencing_token_mismatch: str = Field( + ..., + description="Fencing token did not match.\nThe expected fencing token is returned.", + ) + + +class AppendConditionFailed2(BaseModel): + seq_num_mismatch: conint(ge=0) = Field( + ..., + description="Sequence number did not match the tail of the stream.\nThe expected next sequence number is returned.", + ) + + +class 
AppendConditionFailed(RootModel[AppendConditionFailed1 | AppendConditionFailed2]): + root: AppendConditionFailed1 | AppendConditionFailed2 = Field( + ..., description="Aborted due to a failed condition." + ) + + +class BasinMetricSet(StrEnum): + storage = "storage" + append_ops = "append-ops" + read_ops = "read-ops" + read_throughput = "read-throughput" + append_throughput = "append-throughput" + basin_ops = "basin-ops" + + +class BasinScope(StrEnum): + aws_us_east_1 = "aws:us-east-1" + + +class BasinState(StrEnum): + active = "active" + creating = "creating" + deleting = "deleting" + + +class DeleteOnEmptyConfig(BaseModel): + min_age_secs: conint(ge=0) | None = Field( + None, + description="Minimum age in seconds before an empty stream can be deleted.\nSet to 0 (default) to disable delete-on-empty (don't delete automatically).", + ) + + +class DeleteOnEmptyReconfiguration(BaseModel): + min_age_secs: conint(ge=0) | None = Field( + None, + description="Minimum age in seconds before an empty stream can be deleted.\nSet to 0 to disable delete-on-empty (don't delete automatically).", + ) + + +class ErrorResponse(BaseModel): + code: str | None = None + message: str + + +class Header(RootModel[list[str]]): + root: list[str] = Field( + ..., + description="Headers add structured information to a record as name-value pairs.\n\nThe name cannot be empty, with the exception of an S2 command record.", + max_length=2, + min_length=2, + ) + + +class InfiniteRetention(BaseModel): + pass + + +class IssueAccessTokenResponse(BaseModel): + access_token: str = Field(..., description="Created access token.") + + +class Label(BaseModel): + name: str = Field(..., description="Label name.") + values: list[str] = Field(..., description="Label values.") + + +class Metric4(BaseModel): + label: Label = Field(..., description="Set of string labels.") + + +class MetricUnit(StrEnum): + bytes = "bytes" + operations = "operations" + + +class Operation(StrEnum): + list_basins = "list-basins" + 
create_basin = "create-basin" + delete_basin = "delete-basin" + reconfigure_basin = "reconfigure-basin" + get_basin_config = "get-basin-config" + issue_access_token = "issue-access-token" + revoke_access_token = "revoke-access-token" + list_access_tokens = "list-access-tokens" + list_streams = "list-streams" + create_stream = "create-stream" + delete_stream = "delete-stream" + get_stream_config = "get-stream-config" + reconfigure_stream = "reconfigure-stream" + check_tail = "check-tail" + append = "append" + read = "read" + trim = "trim" + fence = "fence" + account_metrics = "account-metrics" + basin_metrics = "basin-metrics" + stream_metrics = "stream-metrics" + + +class PingEventData(BaseModel): + timestamp: conint(ge=0) + + +class Event(StrEnum): + batch = "batch" + + +class Event1(StrEnum): + error = "error" + + +class ReadEvent2(BaseModel): + data: str + event: Event1 + + +class Event2(StrEnum): + ping = "ping" + + +class ReadEvent3(BaseModel): + data: PingEventData + event: Event2 + + +class ReadWritePermissions(BaseModel): + read: bool | None = Field(False, description="Read permission.") + write: bool | None = Field(False, description="Write permission.") + + +class ResourceSet1(BaseModel): + exact: str = Field( + ..., + description="Match only the resource with this exact name.\nUse an empty string to match no resources.", + ) + + +class ResourceSet2(BaseModel): + prefix: str = Field( + ..., + description="Match all resources that start with this prefix.\nUse an empty string to match all resource.", + ) + + +class ResourceSet(RootModel[ResourceSet1 | ResourceSet2]): + root: ResourceSet1 | ResourceSet2 + + +class RetentionPolicy1(BaseModel): + age: conint(ge=0) = Field( + ..., + description="Age in seconds for automatic trimming of records older than this threshold.\nIf this is set to 0, the stream will have infinite retention.\n(While S2 is in public preview, this is capped at 28 days. 
Let us know if you'd like the cap removed.)", + ) + + +class RetentionPolicy2(BaseModel): + infinite: InfiniteRetention = Field( + ..., description="Retain records unless explicitly trimmed." + ) + + +class RetentionPolicy(RootModel[RetentionPolicy1 | RetentionPolicy2]): + root: RetentionPolicy1 | RetentionPolicy2 + + +class S2Format(StrEnum): + raw = "raw" + base64 = "base64" + + +class Scalar(BaseModel): + name: str = Field(..., description="Metric name.") + unit: MetricUnit = Field(..., description="Unit of the metric.") + value: float = Field(..., description="Metric value.") + + +class StorageClass(StrEnum): + standard = "standard" + express = "express" + + +class StreamInfo(BaseModel): + created_at: time = Field(..., description="Creation time in ISO 8601 format.") + deleted_at: time | None = Field( + None, + description="Deletion time in ISO 8601 format, if the stream is being deleted.", + ) + name: str = Field(..., description="Stream name.") + + +class StreamMetricSet(StrEnum): + storage = "storage" + + +class StreamPosition(BaseModel): + seq_num: conint(ge=0) = Field( + ..., description="Sequence number assigned by the service." 
+ ) + timestamp: conint(ge=0) = Field( + ..., + description="Timestamp, which may be client-specified or assigned by the service.\nIf it is assigned by the service, it will represent milliseconds since Unix epoch.", + ) + + +class TailResponse(BaseModel): + tail: StreamPosition = Field( + ..., + description="Sequence number that will be assigned to the next record on the stream, and timestamp of the last record.", + ) + + +class TimeseriesInterval(StrEnum): + minute = "minute" + hour = "hour" + day = "day" + + +class TimestampingMode(StrEnum): + client_prefer = "client-prefer" + client_require = "client-require" + arrival = "arrival" + + +class TimestampingReconfiguration(BaseModel): + mode: TimestampingMode | None = None + uncapped: bool | None = Field( + None, + description="Allow client-specified timestamps to exceed the arrival time.", + ) + + +class U64(RootModel[conint(ge=0)]): + root: conint(ge=0) + + +class Accumulation(BaseModel): + bucket_length: TimeseriesInterval = Field( + ..., description="The duration of bucket for the accumulation." 
+ ) + name: str = Field(..., description="Timeseries name.") + unit: MetricUnit = Field(..., description="Unit of the metric.") + values: list[list[Any]] = Field( + ..., + description="Timeseries values.\nEach element is a tuple of a timestamp in Unix epoch seconds and a data point.\nThe data point represents the accumulated value for a bucket of time starting at the provided timestamp, lasting for the duration of the `bucket_length` parameter.", + ) + + +class AppendAck(BaseModel): + end: StreamPosition = Field( + ..., + description="Sequence number of the last record that was appended `+ 1`, and timestamp of the last record that was appended.\nThe difference between `end.seq_num` and `start.seq_num` will be the number of records appended.", + ) + start: StreamPosition = Field( + ..., + description="Sequence number and timestamp of the first record that was appended.", + ) + tail: StreamPosition = Field( + ..., + description="Sequence number that will be assigned to the next record on the stream, and timestamp of the last record on the stream.\nThis can be greater than the `end` position in case of concurrent appends.", + ) + + +class AppendRecord(BaseModel): + body: str | None = Field(None, description="Body of the record.") + headers: list[Header] | None = Field( + None, description="Series of name-value pairs for this record." 
+ ) + timestamp: U64 | None = None + + +class BasinInfo(BaseModel): + name: str = Field(..., description="Basin name.") + scope: BasinScope = Field(..., description="Basin scope.") + state: BasinState = Field(..., description="Basin state.") + + +class Gauge(BaseModel): + name: str = Field(..., description="Timeseries name.") + unit: MetricUnit = Field(..., description="Unit of the metric.") + values: list[list[Any]] = Field( + ..., + description="Timeseries values.\nEach element is a tuple of a timestamp in Unix epoch seconds and a data point.\nThe data point represents the value at the instant of the timestamp.", + ) + + +class ListBasinsResponse(BaseModel): + basins: list[BasinInfo] = Field( + ..., description="Matching basins.", max_length=1000 + ) + has_more: bool = Field( + ..., description="Indicates that there are more basins that match the criteria." + ) + + +class ListStreamsResponse(BaseModel): + has_more: bool = Field( + ..., + description="Indicates that there are more results that match the criteria.", + ) + streams: list[StreamInfo] = Field( + ..., description="Matching streams.", max_length=1000 + ) + + +class Metric1(BaseModel): + scalar: Scalar = Field(..., description="Single named value.") + + +class Metric2(BaseModel): + accumulation: Accumulation = Field( + ..., + description="Named series of `(timestamp, value)` points representing an accumulation over a specified\nbucket.", + ) + + +class Metric3(BaseModel): + gauge: Gauge = Field( + ..., + description="Named series of `(timestamp, value)` points each representing an instantaneous value.", + ) + + +class Metric(RootModel[Metric1 | Metric2 | Metric3 | Metric4]): + root: Metric1 | Metric2 | Metric3 | Metric4 + + +class MetricSetResponse(BaseModel): + values: list[Metric] = Field(..., description="Metrics comprising the set.") + + +class PermittedOperationGroups(BaseModel): + account: ReadWritePermissions | None = None + basin: ReadWritePermissions | None = None + stream: ReadWritePermissions | 
None = None + + +class SequencedRecord(BaseModel): + body: str | None = Field(None, description="Body of the record.") + headers: list[Header] | None = Field( + None, description="Series of name-value pairs for this record." + ) + seq_num: U64 = Field(..., description="Sequence number assigned by the service.") + timestamp: U64 = Field(..., description="Timestamp for this record.") + + +class StreamReconfiguration(BaseModel): + delete_on_empty: DeleteOnEmptyReconfiguration | None = None + retention_policy: RetentionPolicy | None = None + storage_class: StorageClass | None = None + timestamping: TimestampingReconfiguration | None = None + + +class TimestampingConfig(BaseModel): + mode: TimestampingMode | None = None + uncapped: bool | None = Field( + None, + description="Allow client-specified timestamps to exceed the arrival time.\nIf this is `false` or not set, client timestamps will be capped at the arrival time.", + ) + + +class AccessTokenScope(BaseModel): + access_tokens: ResourceSet | None = None + basins: ResourceSet | None = None + op_groups: PermittedOperationGroups | None = None + ops: list[Operation] | None = Field( + None, + description="Operations allowed for the token.\nA union of allowed operations and groups is used as an effective set of allowed operations.", + ) + streams: ResourceSet | None = None + + +class AppendInput(BaseModel): + fencing_token: str | None = Field( + None, + description="Enforce a fencing token, which starts out as an empty string that can be overridden by a `fence` command record.", + ) + match_seq_num: U64 | None = None + records: list[AppendRecord] = Field( + ..., + description="Batch of records to append atomically, which must contain at least one record, and no more than 1000.\nThe total size of a batch of records may not exceed 1 MiB of metered bytes.", + ) + + +class BasinReconfiguration(BaseModel): + create_stream_on_append: bool | None = Field( + None, description="Create a stream on append." 
+ ) + create_stream_on_read: bool | None = Field( + None, description="Create a stream on read." + ) + default_stream_config: StreamReconfiguration | None = None + + +class ReadBatch(BaseModel): + records: list[SequencedRecord] = Field( + ..., + description="Records that are durably sequenced on the stream, retrieved based on the requested criteria.\nThis can only be empty in response to a regular (non-SSE) read, if the request cannot be satisfied without violating an explicit limit.", + ) + tail: StreamPosition | None = None + + +class ReadEvent1(BaseModel): + data: ReadBatch + event: Event + id: constr(pattern=r"^[0-9]+,[0-9]+,[0-9]+$") + + +class ReadEvent(RootModel[ReadEvent1 | ReadEvent2 | ReadEvent3]): + root: ReadEvent1 | ReadEvent2 | ReadEvent3 + + +class StreamConfig(BaseModel): + delete_on_empty: DeleteOnEmptyConfig | None = None + retention_policy: RetentionPolicy | None = None + storage_class: StorageClass | None = None + timestamping: TimestampingConfig | None = None + + +class AccessTokenInfo(BaseModel): + auto_prefix_streams: bool | None = Field( + False, + description="Namespace streams based on the configured stream-level scope, which must be a prefix.\nStream name arguments will be automatically prefixed, and the prefix will be stripped when listing streams.", + ) + expires_at: time | None = Field( + None, + description="Expiration time in ISO 8601 format.\nIf not set, the expiration will be set to that of the requestor's token.", + ) + id: str = Field( + ..., + description="Access token ID.\nIt must be unique to the account and between 1 and 96 bytes in length.", + ) + scope: AccessTokenScope = Field(..., description="Access token scope.") + + +class BasinConfig(BaseModel): + create_stream_on_append: bool | None = Field( + None, + description="Create stream on append if it doesn't exist, using the default stream configuration.", + ) + create_stream_on_read: bool | None = Field( + None, + description="Create stream on read if it doesn't exist, 
using the default stream configuration.", + ) + default_stream_config: StreamConfig | None = None + + +class CreateBasinRequest(BaseModel): + basin: str = Field( + ..., + description="Basin name which must be globally unique.\nIt can be between 8 and 48 characters in length, and comprise lowercase letters, numbers and hyphens.\nIt cannot begin or end with a hyphen.", + ) + config: BasinConfig | None = None + scope: BasinScope | None = "aws:us-east-1" + + +class CreateOrReconfigureBasinRequest(BaseModel): + config: BasinConfig | None = None + scope: BasinScope | None = "aws:us-east-1" + + +class CreateStreamRequest(BaseModel): + config: StreamConfig | None = None + stream: str = Field( + ..., + description="Stream name that is unique to the basin.\nIt can be between 1 and 512 bytes in length.", + ) + + +class ListAccessTokensResponse(BaseModel): + access_tokens: list[AccessTokenInfo] = Field( + ..., description="Matching access tokens.", max_length=1000 + ) + has_more: bool = Field( + ..., + description="Indicates that there are more access tokens that match the criteria.", + ) diff --git a/src/streamstore/_lib/s2/__init__.py b/src/s2_sdk/_generated/s2/__init__.py similarity index 100% rename from src/streamstore/_lib/s2/__init__.py rename to src/s2_sdk/_generated/s2/__init__.py diff --git a/src/streamstore/py.typed b/src/s2_sdk/_generated/s2/v1/__init__.py similarity index 100% rename from src/streamstore/py.typed rename to src/s2_sdk/_generated/s2/v1/__init__.py diff --git a/src/s2_sdk/_generated/s2/v1/s2_pb2.py b/src/s2_sdk/_generated/s2/v1/s2_pb2.py new file mode 100644 index 0000000..a2ce856 --- /dev/null +++ b/src/s2_sdk/_generated/s2/v1/s2_pb2.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# NO CHECKED-IN PROTOBUF GENCODE +# source: s2/v1/s2.proto +# Protobuf Python Version: 5.29.0 +"""Generated protocol buffer code.""" + +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, 5, 29, 0, "", "s2/v1/s2.proto" +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x0es2/v1/s2.proto\x12\x05s2.v1"4\n\x0eStreamPosition\x12\x0f\n\x07seq_num\x18\x01 \x01(\x04\x12\x11\n\ttimestamp\x18\x02 \x01(\x04"%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c"b\n\x0c\x41ppendRecord\x12\x16\n\ttimestamp\x18\x01 \x01(\x04H\x00\x88\x01\x01\x12\x1e\n\x07headers\x18\x02 \x03(\x0b\x32\r.s2.v1.Header\x12\x0c\n\x04\x62ody\x18\x03 \x01(\x0c\x42\x0c\n\n_timestamp"\x8f\x01\n\x0b\x41ppendInput\x12$\n\x07records\x18\x01 \x03(\x0b\x32\x13.s2.v1.AppendRecord\x12\x1a\n\rmatch_seq_num\x18\x02 \x01(\x04H\x00\x88\x01\x01\x12\x1a\n\rfencing_token\x18\x03 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_match_seq_numB\x10\n\x0e_fencing_token"z\n\tAppendAck\x12$\n\x05start\x18\x01 \x01(\x0b\x32\x15.s2.v1.StreamPosition\x12"\n\x03\x65nd\x18\x02 \x01(\x0b\x32\x15.s2.v1.StreamPosition\x12#\n\x04tail\x18\x03 \x01(\x0b\x32\x15.s2.v1.StreamPosition"c\n\x0fSequencedRecord\x12\x0f\n\x07seq_num\x18\x01 \x01(\x04\x12\x11\n\ttimestamp\x18\x02 \x01(\x04\x12\x1e\n\x07headers\x18\x03 \x03(\x0b\x32\r.s2.v1.Header\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c"g\n\tReadBatch\x12\'\n\x07records\x18\x01 \x03(\x0b\x32\x16.s2.v1.SequencedRecord\x12(\n\x04tail\x18\x02 
\x01(\x0b\x32\x15.s2.v1.StreamPositionH\x00\x88\x01\x01\x42\x07\n\x05_tailB\t\n\x05s2.v1P\x01\x62\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "s2.v1.s2_pb2", _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = b"\n\005s2.v1P\001" + _globals["_STREAMPOSITION"]._serialized_start = 25 + _globals["_STREAMPOSITION"]._serialized_end = 77 + _globals["_HEADER"]._serialized_start = 79 + _globals["_HEADER"]._serialized_end = 116 + _globals["_APPENDRECORD"]._serialized_start = 118 + _globals["_APPENDRECORD"]._serialized_end = 216 + _globals["_APPENDINPUT"]._serialized_start = 219 + _globals["_APPENDINPUT"]._serialized_end = 362 + _globals["_APPENDACK"]._serialized_start = 364 + _globals["_APPENDACK"]._serialized_end = 486 + _globals["_SEQUENCEDRECORD"]._serialized_start = 488 + _globals["_SEQUENCEDRECORD"]._serialized_end = 587 + _globals["_READBATCH"]._serialized_start = 589 + _globals["_READBATCH"]._serialized_end = 692 +# @@protoc_insertion_point(module_scope) diff --git a/src/s2_sdk/_generated/s2/v1/s2_pb2.pyi b/src/s2_sdk/_generated/s2/v1/s2_pb2.pyi new file mode 100644 index 0000000..de57f04 --- /dev/null +++ b/src/s2_sdk/_generated/s2/v1/s2_pb2.pyi @@ -0,0 +1,106 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers + +DESCRIPTOR: _descriptor.FileDescriptor + +class StreamPosition(_message.Message): + __slots__ = ("seq_num", "timestamp") + SEQ_NUM_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + seq_num: int + timestamp: int + def 
__init__( + self, seq_num: _Optional[int] = ..., timestamp: _Optional[int] = ... + ) -> None: ... + +class Header(_message.Message): + __slots__ = ("name", "value") + NAME_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + name: bytes + value: bytes + def __init__( + self, name: _Optional[bytes] = ..., value: _Optional[bytes] = ... + ) -> None: ... + +class AppendRecord(_message.Message): + __slots__ = ("timestamp", "headers", "body") + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + HEADERS_FIELD_NUMBER: _ClassVar[int] + BODY_FIELD_NUMBER: _ClassVar[int] + timestamp: int + headers: _containers.RepeatedCompositeFieldContainer[Header] + body: bytes + def __init__( + self, + timestamp: _Optional[int] = ..., + headers: _Optional[_Iterable[_Union[Header, _Mapping]]] = ..., + body: _Optional[bytes] = ..., + ) -> None: ... + +class AppendInput(_message.Message): + __slots__ = ("records", "match_seq_num", "fencing_token") + RECORDS_FIELD_NUMBER: _ClassVar[int] + MATCH_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] + FENCING_TOKEN_FIELD_NUMBER: _ClassVar[int] + records: _containers.RepeatedCompositeFieldContainer[AppendRecord] + match_seq_num: int + fencing_token: str + def __init__( + self, + records: _Optional[_Iterable[_Union[AppendRecord, _Mapping]]] = ..., + match_seq_num: _Optional[int] = ..., + fencing_token: _Optional[str] = ..., + ) -> None: ... + +class AppendAck(_message.Message): + __slots__ = ("start", "end", "tail") + START_FIELD_NUMBER: _ClassVar[int] + END_FIELD_NUMBER: _ClassVar[int] + TAIL_FIELD_NUMBER: _ClassVar[int] + start: StreamPosition + end: StreamPosition + tail: StreamPosition + def __init__( + self, + start: _Optional[_Union[StreamPosition, _Mapping]] = ..., + end: _Optional[_Union[StreamPosition, _Mapping]] = ..., + tail: _Optional[_Union[StreamPosition, _Mapping]] = ..., + ) -> None: ... 
+ +class SequencedRecord(_message.Message): + __slots__ = ("seq_num", "timestamp", "headers", "body") + SEQ_NUM_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + HEADERS_FIELD_NUMBER: _ClassVar[int] + BODY_FIELD_NUMBER: _ClassVar[int] + seq_num: int + timestamp: int + headers: _containers.RepeatedCompositeFieldContainer[Header] + body: bytes + def __init__( + self, + seq_num: _Optional[int] = ..., + timestamp: _Optional[int] = ..., + headers: _Optional[_Iterable[_Union[Header, _Mapping]]] = ..., + body: _Optional[bytes] = ..., + ) -> None: ... + +class ReadBatch(_message.Message): + __slots__ = ("records", "tail") + RECORDS_FIELD_NUMBER: _ClassVar[int] + TAIL_FIELD_NUMBER: _ClassVar[int] + records: _containers.RepeatedCompositeFieldContainer[SequencedRecord] + tail: StreamPosition + def __init__( + self, + records: _Optional[_Iterable[_Union[SequencedRecord, _Mapping]]] = ..., + tail: _Optional[_Union[StreamPosition, _Mapping]] = ..., + ) -> None: ... diff --git a/src/s2_sdk/_mappers.py b/src/s2_sdk/_mappers.py new file mode 100644 index 0000000..30cbdcf --- /dev/null +++ b/src/s2_sdk/_mappers.py @@ -0,0 +1,488 @@ +"""Mapping between SDK types and wire types (both pydantic/JSON and protobuf).""" + +from base64 import b64decode, b64encode +from datetime import datetime +from typing import Any, Literal + +import s2_sdk._generated.s2.v1.s2_pb2 as pb +from s2_sdk.types import ( + AccessTokenInfo, + AccessTokenScope, + Accumulation, + AppendInput, + AppendOutput, + BasinConfig, + BasinInfo, + BasinScope, + BasinState, + ExactMatch, + Gauge, + Label, + MetricUnit, + OperationGroupPermissions, + Permission, + PrefixMatch, + ReadLimit, + Record, + Scalar, + SeqNum, + SequencedRecord, + StorageClass, + StreamConfig, + StreamInfo, + StreamPosition, + Tail, + TailOffset, + TimeseriesInterval, + Timestamp, + Timestamping, + TimestampingMode, +) + +_ReadStart = SeqNum | Timestamp | TailOffset + + +# ──────────────────────────── Control plane (JSON) 
──────────────────────────── + + +def basin_config_to_wire(config: BasinConfig | None) -> dict[str, Any] | None: + if config is None: + return None + result: dict[str, Any] = {} + if config.default_stream_config is not None: + result["default_stream_config"] = stream_config_to_wire( + config.default_stream_config + ) + if config.create_stream_on_append is not None: + result["create_stream_on_append"] = config.create_stream_on_append + if config.create_stream_on_read is not None: + result["create_stream_on_read"] = config.create_stream_on_read + return result + + +def basin_config_from_wire(data: dict[str, Any]) -> BasinConfig: + dsc = data.get("default_stream_config") + return BasinConfig( + default_stream_config=stream_config_from_wire(dsc) if dsc else None, + create_stream_on_append=data.get("create_stream_on_append"), + create_stream_on_read=data.get("create_stream_on_read"), + ) + + +def basin_reconfiguration_to_wire(config: BasinConfig) -> dict[str, Any]: + result: dict[str, Any] = {} + if config.default_stream_config is not None: + result["default_stream_config"] = stream_reconfiguration_to_wire( + config.default_stream_config + ) + if config.create_stream_on_append is not None: + result["create_stream_on_append"] = config.create_stream_on_append + if config.create_stream_on_read is not None: + result["create_stream_on_read"] = config.create_stream_on_read + return result + + +def stream_config_to_wire(config: StreamConfig | None) -> dict[str, Any] | None: + if config is None: + return None + result: dict[str, Any] = {} + if config.storage_class is not None: + result["storage_class"] = config.storage_class.value + if config.retention_policy is not None: + result["retention_policy"] = _retention_policy_to_wire(config.retention_policy) + if config.timestamping is not None: + ts: dict[str, Any] = {} + if config.timestamping.mode is not None: + ts["mode"] = config.timestamping.mode.value + if config.timestamping.uncapped is not None: + ts["uncapped"] = 
config.timestamping.uncapped + result["timestamping"] = ts + if config.delete_on_empty_min_age is not None: + result["delete_on_empty"] = {"min_age_secs": config.delete_on_empty_min_age} + return result + + +def stream_reconfiguration_to_wire(config: StreamConfig) -> dict[str, Any]: + result: dict[str, Any] = {} + if config.storage_class is not None: + result["storage_class"] = config.storage_class.value + if config.retention_policy is not None: + result["retention_policy"] = _retention_policy_to_wire(config.retention_policy) + if config.timestamping is not None: + ts: dict[str, Any] = {} + if config.timestamping.mode is not None: + ts["mode"] = config.timestamping.mode.value + if config.timestamping.uncapped is not None: + ts["uncapped"] = config.timestamping.uncapped + result["timestamping"] = ts + if config.delete_on_empty_min_age is not None: + result["delete_on_empty"] = {"min_age_secs": config.delete_on_empty_min_age} + return result + + +def stream_config_from_wire(data: dict[str, Any]) -> StreamConfig: + retention_policy: int | Literal["infinite"] | None = None + rp = data.get("retention_policy") + if rp is not None: + retention_policy = _retention_policy_from_wire(rp) + + timestamping = None + ts = data.get("timestamping") + if ts is not None: + mode_val = ts.get("mode") + timestamping = Timestamping( + mode=TimestampingMode(mode_val) if mode_val else None, + uncapped=ts.get("uncapped"), + ) + + doe = data.get("delete_on_empty") + delete_on_empty_min_age = doe.get("min_age_secs") if doe else None + + sc = data.get("storage_class") + return StreamConfig( + storage_class=StorageClass(sc) if sc else None, + retention_policy=retention_policy, + timestamping=timestamping, + delete_on_empty_min_age=delete_on_empty_min_age, + ) + + +def _retention_policy_to_wire(rp: int | Literal["infinite"]) -> dict[str, Any]: + if rp == "infinite": + return {"infinite": {}} + return {"age": rp} + + +def _retention_policy_from_wire(data: dict[str, Any]) -> int | 
Literal["infinite"]: + if "infinite" in data: + return "infinite" + return data["age"] + + +def basin_info_from_wire(data: dict[str, Any]) -> BasinInfo: + return BasinInfo( + name=data["name"], + scope=BasinScope(data["scope"]), + state=BasinState(data["state"]), + ) + + +def stream_info_from_wire(data: dict[str, Any]) -> StreamInfo: + created_at = datetime.fromisoformat(data["created_at"]) + deleted_at_str = data.get("deleted_at") + deleted_at = datetime.fromisoformat(deleted_at_str) if deleted_at_str else None + return StreamInfo( + name=data["name"], + created_at=created_at, + deleted_at=deleted_at, + ) + + +def _resource_set_to_wire( + rs: ExactMatch | PrefixMatch | None, +) -> dict[str, str] | None: + if rs is None: + return None + if isinstance(rs, ExactMatch): + return {"exact": rs.value} + return {"prefix": rs.value} + + +def _resource_set_from_wire( + data: dict[str, Any] | None, +) -> ExactMatch | PrefixMatch | None: + if data is None: + return None + if "exact" in data: + return ExactMatch(data["exact"]) + if "prefix" in data: + return PrefixMatch(data["prefix"]) + return None + + +def _rw_perms_to_wire(perm: Permission | None) -> dict[str, bool] | None: + if perm is None: + return None + match perm: + case Permission.READ: + return {"read": True} + case Permission.WRITE: + return {"write": True} + case Permission.READ_WRITE: + return {"read": True, "write": True} + + +def _rw_perms_from_wire(data: dict[str, Any] | None) -> Permission | None: + if data is None: + return None + read = data.get("read", False) + write = data.get("write", False) + if read and write: + return Permission.READ_WRITE + elif read: + return Permission.READ + elif write: + return Permission.WRITE + return None + + +def access_token_info_to_wire( + id: str, + scope: AccessTokenScope, + auto_prefix_streams: bool, + expires_at: str | None, +) -> dict[str, Any]: + scope_wire: dict[str, Any] = {} + if scope.basins is not None: + scope_wire["basins"] = _resource_set_to_wire(scope.basins) 
def access_token_info_from_wire(data: dict[str, Any]) -> "AccessTokenInfo":
    """Parse an access-token description from its JSON wire form."""
    scope_data = data["scope"]

    og_data = scope_data.get("op_groups")
    op_groups = (
        OperationGroupPermissions(
            account=_rw_perms_from_wire(og_data.get("account")),
            basin=_rw_perms_from_wire(og_data.get("basin")),
            stream=_rw_perms_from_wire(og_data.get("stream")),
        )
        if og_data
        else None
    )

    # Imported lazily, mirroring the original module's local import of
    # Operation (presumably to sidestep an import cycle — confirm).
    from s2_sdk.types import Operation

    ops = [Operation(op) for op in scope_data.get("ops") or []]

    return AccessTokenInfo(
        id=data["id"],
        scope=AccessTokenScope(
            basins=_resource_set_from_wire(scope_data.get("basins")),
            streams=_resource_set_from_wire(scope_data.get("streams")),
            access_tokens=_resource_set_from_wire(scope_data.get("access_tokens")),
            op_groups=op_groups,
            ops=ops,
        ),
        expires_at=data.get("expires_at"),
        auto_prefix_streams=data.get("auto_prefix_streams", False),
    )


def metric_set_from_wire(
    data: dict[str, Any],
) -> "list[Scalar | Accumulation | Gauge | Label]":
    """Parse a metric-set response.

    Each entry under "values" is tagged by exactly one of the known keys
    (scalar/accumulation/gauge/label); entries with no known tag are skipped.
    """
    parsed: list = []
    for entry in data.get("values", []):
        if "scalar" in entry:
            s = entry["scalar"]
            parsed.append(
                Scalar(name=s["name"], unit=MetricUnit(s["unit"]), value=s["value"])
            )
        elif "accumulation" in entry:
            a = entry["accumulation"]
            parsed.append(
                Accumulation(
                    name=a["name"],
                    unit=MetricUnit(a["unit"]),
                    bucket_length=TimeseriesInterval(a["bucket_length"]),
                    values=[(int(p[0]), float(p[1])) for p in a["values"]],
                )
            )
        elif "gauge" in entry:
            g = entry["gauge"]
            parsed.append(
                Gauge(
                    name=g["name"],
                    unit=MetricUnit(g["unit"]),
                    values=[(int(p[0]), float(p[1])) for p in g["values"]],
                )
            )
        elif "label" in entry:
            lb = entry["label"]
            parsed.append(Label(name=lb["name"], values=lb["values"]))
    return parsed
[]): + if "scalar" in metric: + s = metric["scalar"] + result.append( + Scalar( + name=s["name"], + unit=MetricUnit(s["unit"]), + value=s["value"], + ) + ) + elif "accumulation" in metric: + a = metric["accumulation"] + result.append( + Accumulation( + name=a["name"], + unit=MetricUnit(a["unit"]), + bucket_length=TimeseriesInterval(a["bucket_length"]), + values=[(int(v[0]), float(v[1])) for v in a["values"]], + ) + ) + elif "gauge" in metric: + g = metric["gauge"] + result.append( + Gauge( + name=g["name"], + unit=MetricUnit(g["unit"]), + values=[(int(v[0]), float(v[1])) for v in g["values"]], + ) + ) + elif "label" in metric: + lb = metric["label"] + result.append( + Label( + name=lb["name"], + values=lb["values"], + ) + ) + return result + + +def tail_from_wire(data: dict[str, Any]) -> Tail: + tail = data["tail"] + return Tail(next_seq_num=tail["seq_num"], last_timestamp=tail["timestamp"]) + + +# ──────────────────────────── Data plane (protobuf) ─────────────────────────── + + +def append_record_to_proto(record: Record) -> pb.AppendRecord: + headers = [pb.Header(name=name, value=value) for (name, value) in record.headers] + return pb.AppendRecord( + timestamp=record.timestamp, headers=headers, body=record.body + ) + + +def append_input_to_proto(input: AppendInput) -> pb.AppendInput: + records = [append_record_to_proto(r) for r in input.records] + return pb.AppendInput( + records=records, + match_seq_num=input.match_seq_num, + fencing_token=input.fencing_token, + ) + + +def append_ack_from_proto(ack: pb.AppendAck) -> AppendOutput: + return AppendOutput( + start=StreamPosition(ack.start.seq_num, ack.start.timestamp), + end=StreamPosition(ack.end.seq_num, ack.end.timestamp), + tail=StreamPosition(ack.tail.seq_num, ack.tail.timestamp), + ) + + +def read_batch_from_proto( + batch: pb.ReadBatch, ignore_command_records: bool = False +) -> tuple[list[SequencedRecord], Tail | None]: + records = [] + for sr in batch.records: + if ignore_command_records and 
_is_command_record(sr): + continue + records.append( + SequencedRecord( + seq_num=sr.seq_num, + body=sr.body, + headers=[(h.name, h.value) for h in sr.headers], + timestamp=sr.timestamp, + ) + ) + tail = None + if batch.HasField("tail"): + tail = Tail( + next_seq_num=batch.tail.seq_num, + last_timestamp=batch.tail.timestamp, + ) + return records, tail + + +def sequenced_record_from_proto(sr: pb.SequencedRecord) -> SequencedRecord: + return SequencedRecord( + seq_num=sr.seq_num, + body=sr.body, + headers=[(h.name, h.value) for h in sr.headers], + timestamp=sr.timestamp, + ) + + +def _is_command_record(sr: pb.SequencedRecord) -> bool: + if len(sr.headers) == 1 and sr.headers[0].name == b"": + return True + return False + + +# ──────────────────────────── Data plane (JSON) ─────────────────────────────── +# Used for unary append/read via JSON content-type + + +def append_input_to_json(input: AppendInput) -> dict[str, Any]: + records = [] + for r in input.records: + rec: dict[str, Any] = {} + if r.body: + rec["body"] = b64encode(r.body).decode() + if r.headers: + rec["headers"] = [ + [b64encode(name).decode(), b64encode(value).decode()] + for (name, value) in r.headers + ] + if r.timestamp is not None: + rec["timestamp"] = r.timestamp + records.append(rec) + + result: dict[str, Any] = {"records": records} + if input.match_seq_num is not None: + result["match_seq_num"] = input.match_seq_num + if input.fencing_token is not None: + result["fencing_token"] = input.fencing_token + return result + + +def append_ack_from_json(data: dict[str, Any]) -> AppendOutput: + return AppendOutput( + start=StreamPosition(data["start"]["seq_num"], data["start"]["timestamp"]), + end=StreamPosition(data["end"]["seq_num"], data["end"]["timestamp"]), + tail=StreamPosition(data["tail"]["seq_num"], data["tail"]["timestamp"]), + ) + + +def read_batch_from_json( + data: dict[str, Any], ignore_command_records: bool = False +) -> tuple[list[SequencedRecord], Tail | None]: + records = [] + for sr in 
def read_start_params(start: "_ReadStart") -> dict[str, Any]:
    """Translate a read-start position into its single query parameter."""
    for kind, key in (
        (SeqNum, "seq_num"),
        (Timestamp, "timestamp"),
        (TailOffset, "tail_offset"),
    ):
        if isinstance(start, kind):
            return {key: start.value}
    raise ValueError("start doesn't match any of the expected types")


def read_limit_params(limit: "ReadLimit | None") -> dict[str, Any]:
    """Translate an optional ReadLimit into count/bytes query parameters."""
    if not limit:
        return {}
    params: dict[str, Any] = {}
    if limit.count is not None:
        params["count"] = limit.count
    if limit.bytes is not None:
        params["bytes"] = limit.bytes
    return params
stream_info_from_wire, + stream_reconfiguration_to_wire, + tail_from_wire, +) +from s2_sdk._retrier import Retrier, http_retry_on +from s2_sdk._s2s._append_session import run_append_session +from s2_sdk._s2s._read_session import run_read_session +from s2_sdk._validators import validate_append_input, validate_basin + + +def _s2_request_token() -> str: + return uuid.uuid4().hex + + +@dataclass(slots=True) +class _Config: + max_retries: int + enable_append_retries: bool + enable_compression: bool + timeout: float + access_token: str + + +class S2: + """Async client for interacting with s2.dev.""" + + __slots__ = ( + "_endpoints", + "_account_client", + "_basin_clients", + "_config", + "_retrier", + ) + + @fallible + def __init__( + self, + access_token: str, + endpoints: Endpoints | None = None, + request_timeout: timedelta = timedelta(seconds=5.0), + max_retries: int = 3, + enable_append_retries: bool = True, + enable_compression: bool = False, + ) -> None: + self._endpoints = endpoints if endpoints is not None else Endpoints.default() + self._account_client = HttpClient( + base_url=self._endpoints.account(), + access_token=access_token, + timeout=request_timeout.total_seconds(), + ) + self._basin_clients: dict[str, HttpClient] = {} + self._config = _Config( + max_retries=max_retries, + enable_append_retries=enable_append_retries, + enable_compression=enable_compression, + timeout=request_timeout.total_seconds(), + access_token=access_token, + ) + self._retrier = Retrier( + should_retry_on=http_retry_on, + max_attempts=max_retries, + ) + + async def __aenter__(self) -> Self: + return self + + async def __aexit__(self, exc_type, exc_value, traceback) -> bool: + await self.close() + if exc_type is None and exc_value is None and traceback is None: + return True + return False + + def __getitem__(self, name: str) -> "S2Basin": + return self.basin(name) + + async def close(self) -> None: + """Close all connections.""" + await self._account_client.close() + for client in 
self._basin_clients.values(): + await client.close() + + def _get_basin_client(self, name: str) -> HttpClient: + if name not in self._basin_clients: + self._basin_clients[name] = HttpClient( + base_url=self._endpoints.basin(name), + access_token=self._config.access_token, + timeout=self._config.timeout, + ) + return self._basin_clients[name] + + @fallible + async def create_basin( + self, + name: str, + config: types.BasinConfig | None = None, + ) -> types.BasinInfo: + """Create a basin.""" + validate_basin(name) + body: dict[str, Any] = {"basin": name} + if config is not None: + body["config"] = basin_config_to_wire(config) + + async def _do(): + return await self._account_client.request( + "POST", + "/basins", + json=body, + headers={"s2-request-token": _s2_request_token()}, + ) + + response = await self._retrier(_do) + return basin_info_from_wire(response.json()) + + def basin(self, name: str) -> "S2Basin": + """Get a Basin handle for performing basin operations.""" + validate_basin(name) + return S2Basin(name, self._get_basin_client(name), self._config) + + @fallible + async def list_basins( + self, + prefix: str = "", + start_after: str = "", + limit: int = 1000, + ) -> types.Page[types.BasinInfo]: + """List basins.""" + params: dict[str, Any] = {} + if prefix: + params["prefix"] = prefix + if start_after: + params["start_after"] = start_after + if limit != 1000: + params["limit"] = limit + + async def _do(): + return await self._account_client.request("GET", "/basins", params=params) + + response = await self._retrier(_do) + data = response.json() + return types.Page( + items=[basin_info_from_wire(b) for b in data["basins"]], + has_more=data["has_more"], + ) + + @fallible + async def delete_basin(self, name: str) -> None: + """Delete a basin.""" + + async def _do(): + return await self._account_client.request("DELETE", f"/basins/{name}") + + await self._retrier(_do) + + @fallible + async def get_basin_config(self, name: str) -> types.BasinConfig: + """Get 
basin configuration.""" + + async def _do(): + return await self._account_client.request("GET", f"/basins/{name}") + + response = await self._retrier(_do) + return basin_config_from_wire(response.json()) + + @fallible + async def reconfigure_basin( + self, + name: str, + config: types.BasinConfig, + ) -> types.BasinConfig: + """Modify basin configuration.""" + body = basin_reconfiguration_to_wire(config) + + async def _do(): + return await self._account_client.request( + "PATCH", f"/basins/{name}", json=body + ) + + response = await self._retrier(_do) + return basin_config_from_wire(response.json()) + + @fallible + async def issue_access_token( + self, + id: str, + scope: types.AccessTokenScope, + expires_at: str | None = None, + auto_prefix_streams: bool = False, + ) -> str: + """Issue a new access token.""" + body = access_token_info_to_wire(id, scope, auto_prefix_streams, expires_at) + + async def _do(): + return await self._account_client.request( + "POST", "/access-tokens", json=body + ) + + response = await self._retrier(_do) + return response.json()["access_token"] + + @fallible + async def list_access_tokens( + self, + prefix: str = "", + start_after: str = "", + limit: int = 1000, + ) -> types.Page[types.AccessTokenInfo]: + """List access tokens.""" + params: dict[str, Any] = {} + if prefix: + params["prefix"] = prefix + if start_after: + params["start_after"] = start_after + if limit != 1000: + params["limit"] = limit + + async def _do(): + return await self._account_client.request( + "GET", "/access-tokens", params=params + ) + + response = await self._retrier(_do) + data = response.json() + return types.Page( + items=[access_token_info_from_wire(info) for info in data["access_tokens"]], + has_more=data["has_more"], + ) + + @fallible + async def revoke_access_token(self, id: str) -> None: + """Revoke an access token.""" + + async def _do(): + return await self._account_client.request("DELETE", f"/access-tokens/{id}") + + await self._retrier(_do) + + 
@fallible + async def account_metrics( + self, + set: types.AccountMetricSet, + start: int | None = None, + end: int | None = None, + interval: types.TimeseriesInterval | None = None, + ) -> list[types.Scalar | types.Accumulation | types.Gauge | types.Label]: + """Get account-level metrics.""" + params: dict[str, Any] = {"set": set.value} + if start is not None: + params["start"] = start + if end is not None: + params["end"] = end + if interval is not None: + params["interval"] = interval.value + + async def _do(): + return await self._account_client.request("GET", "/metrics", params=params) + + response = await self._retrier(_do) + return metric_set_from_wire(response.json()) + + @fallible + async def basin_metrics( + self, + basin: str, + set: types.BasinMetricSet, + start: int | None = None, + end: int | None = None, + interval: types.TimeseriesInterval | None = None, + ) -> list[types.Scalar | types.Accumulation | types.Gauge | types.Label]: + """Get basin-level metrics.""" + params: dict[str, Any] = {"set": set.value} + if start is not None: + params["start"] = start + if end is not None: + params["end"] = end + if interval is not None: + params["interval"] = interval.value + + async def _do(): + return await self._account_client.request( + "GET", f"/metrics/{basin}", params=params + ) + + response = await self._retrier(_do) + return metric_set_from_wire(response.json()) + + @fallible + async def stream_metrics( + self, + basin: str, + stream: str, + set: types.StreamMetricSet, + start: int | None = None, + end: int | None = None, + interval: types.TimeseriesInterval | None = None, + ) -> list[types.Scalar | types.Accumulation | types.Gauge | types.Label]: + """Get stream-level metrics.""" + params: dict[str, Any] = {"set": set.value} + if start is not None: + params["start"] = start + if end is not None: + params["end"] = end + if interval is not None: + params["interval"] = interval.value + + async def _do(): + return await self._account_client.request( + 
"GET", f"/metrics/{basin}/{stream}", params=params + ) + + response = await self._retrier(_do) + return metric_set_from_wire(response.json()) + + +class S2Basin: + """Basin-level client. Returned by S2.basin(). Do not instantiate directly.""" + + __slots__ = ( + "_name", + "_client", + "_config", + "_retrier", + ) + + @fallible + def __init__( + self, + name: str, + client: HttpClient, + config: _Config, + ) -> None: + self._name = name + self._client = client + self._config = config + self._retrier = Retrier( + should_retry_on=http_retry_on, + max_attempts=config.max_retries, + ) + + def __repr__(self) -> str: + return f"S2Basin(name={self.name})" + + def __getitem__(self, name: str) -> "S2Stream": + return self.stream(name) + + @property + def name(self) -> str: + """Basin name.""" + return self._name + + @fallible + async def create_stream( + self, + name: str, + config: types.StreamConfig | None = None, + ) -> types.StreamInfo: + """Create a stream.""" + body: dict[str, Any] = {"stream": name} + if config is not None: + body["config"] = stream_config_to_wire(config) + + async def _do(): + return await self._client.request( + "POST", + "/streams", + json=body, + headers={"s2-request-token": _s2_request_token()}, + ) + + response = await self._retrier(_do) + return stream_info_from_wire(response.json()) + + def stream(self, name: str) -> "S2Stream": + """Get a Stream handle for performing stream operations.""" + return S2Stream(name, self._client, self._config) + + @fallible + async def list_streams( + self, + prefix: str = "", + start_after: str = "", + limit: int = 1000, + ) -> types.Page[types.StreamInfo]: + """List streams.""" + params: dict[str, Any] = {} + if prefix: + params["prefix"] = prefix + if start_after: + params["start_after"] = start_after + if limit != 1000: + params["limit"] = limit + + async def _do(): + return await self._client.request("GET", "/streams", params=params) + + response = await self._retrier(_do) + data = response.json() + return 
types.Page( + items=[stream_info_from_wire(s) for s in data["streams"]], + has_more=data["has_more"], + ) + + @fallible + async def delete_stream(self, name: str) -> None: + """Delete a stream.""" + + async def _do(): + return await self._client.request("DELETE", f"/streams/{name}") + + await self._retrier(_do) + + @fallible + async def get_stream_config(self, name: str) -> types.StreamConfig: + """Get stream configuration.""" + + async def _do(): + return await self._client.request("GET", f"/streams/{name}") + + response = await self._retrier(_do) + return stream_config_from_wire(response.json()) + + @fallible + async def reconfigure_stream( + self, + name: str, + config: types.StreamConfig, + ) -> types.StreamConfig: + """Modify stream configuration.""" + body = stream_reconfiguration_to_wire(config) + + async def _do(): + return await self._client.request("PATCH", f"/streams/{name}", json=body) + + response = await self._retrier(_do) + return stream_config_from_wire(response.json()) + + +class S2Stream: + """Stream-level client. Returned by S2Basin.stream(). 
Do not instantiate directly.""" + + __slots__ = ( + "_name", + "_client", + "_config", + "_retrier", + ) + + def __init__( + self, + name: str, + client: HttpClient, + config: _Config, + ) -> None: + self._name = name + self._client = client + self._config = config + self._retrier = Retrier( + should_retry_on=http_retry_on, + max_attempts=config.max_retries, + ) + + def __repr__(self) -> str: + return f"S2Stream(name={self.name})" + + @property + def name(self) -> str: + """Stream name.""" + return self._name + + @fallible + async def check_tail(self) -> types.Tail: + """Check the tail of a stream.""" + + async def _do(): + return await self._client.request( + "GET", f"/streams/{self.name}/records/tail" + ) + + response = await self._retrier(_do) + return tail_from_wire(response.json()) + + @fallible + async def append(self, input: types.AppendInput) -> types.AppendOutput: + """Append a batch of records to a stream.""" + validate_append_input(input) + body = append_input_to_json(input) + + async def _do(): + return await self._client.request( + "POST", + f"/streams/{self.name}/records", + json=body, + headers={"s2-format": "base64"}, + ) + + response = ( + await self._retrier(_do) + if self._config.enable_append_retries + else await _do() + ) + return append_ack_from_json(response.json()) + + @fallible + async def append_session( + self, inputs: AsyncIterable[types.AppendInput] + ) -> AsyncIterable[types.AppendOutput]: + """Append batches of records continuously via S2S session.""" + async for output in run_append_session( + self._client, + self.name, + inputs, + max_retries=self._config.max_retries, + enable_append_retries=self._config.enable_append_retries, + enable_compression=self._config.enable_compression, + ): + yield output + + @fallible + async def read( + self, + start: types.SeqNum | types.Timestamp | types.TailOffset, + limit: types.ReadLimit | None = None, + until: int | None = None, + ignore_command_records: bool = False, + ) -> 
def http_retry_on(e: Exception) -> bool:
    """Return True when *e* is worth retrying.

    Retryable: an S2ApiError carrying a transient HTTP status, or any
    httpx transport-level failure (connect/read/write/timeout errors).
    """
    transient_api_error = isinstance(e, S2ApiError) and e.status_code in (
        408,
        429,
        500,
        502,
        503,
        504,
    )
    return transient_api_error or isinstance(e, httpx.TransportError)
_MEMORY_STREAM_MAX_BUF_SIZE = 100


async def _send_frames(
    inflight_inputs: "deque[AppendInput]",
    input_rx: "MemoryObjectReceiveStream[AppendInput]",
    replay: "list[AppendInput]",
    send_queue: "asyncio.Queue[bytes | None]",
    enable_compression: bool,
):
    """Encode and enqueue append-input frames.

    On a retry attempt, inputs that were inflight when the previous attempt
    failed are replayed first, in their original order.
    """
    compression = COMPRESSION_ZSTD if enable_compression else COMPRESSION_NONE

    def _encode(inp: "AppendInput") -> bytes:
        # proto-serialize, optionally compress, then frame for the s2s stream
        payload = append_input_to_proto(inp).SerializeToString()
        payload, used = maybe_compress(payload, compression)
        return encode_frame(payload, compression=used)

    # Replay inflight inputs from the previous attempt (already validated
    # and already tracked in inflight_inputs).
    for inp in replay:
        await send_queue.put(_encode(inp))

    # Stream fresh inputs, tracking each as inflight until acknowledged.
    async for inp in input_rx:
        validate_append_input(inp)
        inflight_inputs.append(inp)
        await send_queue.put(_encode(inp))

    # None marks end-of-stream for the frame body generator.
    await send_queue.put(None)
Replays inflight inputs first on retry.""" + compression = COMPRESSION_ZSTD if enable_compression else COMPRESSION_NONE + + # Replay inflight from previous attempt + for inp in replay: + proto = append_input_to_proto(inp) + body = proto.SerializeToString() + body, comp = maybe_compress(body, compression) + frame = encode_frame(body, compression=comp) + await send_queue.put(frame) + + # Send new inputs + async for inp in input_rx: + validate_append_input(inp) + inflight_inputs.append(inp) + proto = append_input_to_proto(inp) + body = proto.SerializeToString() + body, comp = maybe_compress(body, compression) + frame = encode_frame(body, compression=comp) + await send_queue.put(frame) + + # Signal end of stream + await send_queue.put(None) + + +async def _frame_body_generator(send_queue: asyncio.Queue[bytes | None]): + """Async generator that yields frame bytes for httpx streaming request.""" + while True: + frame = await send_queue.get() + if frame is None: + return + yield frame + + +async def _receive_acks( + client: HttpClient, + stream_name: str, + attempt: Attempt, + inflight_inputs: deque[AppendInput], + output_tx: MemoryObjectSendStream[AppendOutput], + send_queue: asyncio.Queue[bytes | None], + enable_compression: bool, +): + """Send the request and receive ack frames.""" + + async def body_gen(): + async for chunk in _frame_body_generator(send_queue): + yield chunk + + async with client.stream( + "POST", + f"/streams/{stream_name}/records", + headers={ + "content-type": "s2s/proto", + "accept": "s2s/proto", + }, + ) as response: + if response.status_code != 200: + body = await response.aread() + raise S2SessionError( + body.decode("utf-8", errors="replace"), + status_code=response.status_code, + ) + + async for frame_body in read_frames(response.aiter_bytes()): + if attempt.value > 0: + attempt.value = 0 + ack = pb.AppendAck() + ack.ParseFromString(frame_body) + corresponding_input = inflight_inputs.popleft() + num_records_sent = 
len(corresponding_input.records) + num_records_ackd = ack.end.seq_num - ack.start.seq_num + if num_records_sent != num_records_ackd: + raise RuntimeError( + "Number of records sent doesn't match the number of acknowledgements received" + ) + await output_tx.send(append_ack_from_proto(ack)) + + +async def run_append_session( + client: HttpClient, + stream_name: str, + inputs: AsyncIterable[AppendInput], + max_retries: int, + enable_append_retries: bool, + enable_compression: bool, +) -> AsyncIterable[AppendOutput]: + """Run a bidirectional append session with retry support.""" + if not enable_append_retries: + # Simple non-retrying path + async for output in _simple_append_session( + client, stream_name, inputs, enable_compression + ): + yield output + return + + # Retrying path with inflight replay + input_tx, input_rx = create_memory_object_stream[AppendInput]( + max_buffer_size=_MEMORY_STREAM_MAX_BUF_SIZE + ) + output_tx, output_rx = create_memory_object_stream[AppendOutput]( + max_buffer_size=_MEMORY_STREAM_MAX_BUF_SIZE + ) + + async def pipe_inputs(): + async with input_tx: + async for inp in inputs: + validate_append_input(inp) + await input_tx.send(inp) + + async def retrying_inner(): + inflight_inputs: deque[AppendInput] = deque() + backoffs = compute_backoffs(max_retries) + attempt = Attempt(value=0) + async with output_tx: + while True: + send_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + try: + replay = list(inflight_inputs) + async with create_task_group() as tg: + tg.start_soon( + _send_frames, + inflight_inputs, + input_rx, + replay, + send_queue, + enable_compression, + ) + tg.start_soon( + _receive_acks, + client, + stream_name, + attempt, + inflight_inputs, + output_tx, + send_queue, + enable_compression, + ) + return + except* Exception as eg: + if attempt.value < max_retries and any( + http_retry_on(e) for e in eg.exceptions + ): + await asyncio.sleep(backoffs[attempt.value]) + attempt.value += 1 + else: + raise eg + + async with 
create_task_group() as tg: + tg.start_soon(retrying_inner) + tg.start_soon(pipe_inputs) + async with output_rx: + async for output in output_rx: + yield output + + +async def _simple_append_session( + client: HttpClient, + stream_name: str, + inputs: AsyncIterable[AppendInput], + enable_compression: bool, +) -> AsyncIterable[AppendOutput]: + """Non-retrying append session.""" + compression = COMPRESSION_ZSTD if enable_compression else COMPRESSION_NONE + + async def body_gen(): + async for inp in inputs: + validate_append_input(inp) + proto = append_input_to_proto(inp) + body = proto.SerializeToString() + body, comp = maybe_compress(body, compression) + frame = encode_frame(body, compression=comp) + yield frame + + async with client.stream( + "POST", + f"/streams/{stream_name}/records", + headers={ + "content-type": "s2s/proto", + "accept": "s2s/proto", + }, + ) as response: + if response.status_code != 200: + body = await response.aread() + raise S2SessionError( + body.decode("utf-8", errors="replace"), + status_code=response.status_code, + ) + + async for frame_body in read_frames(response.aiter_bytes()): + ack = pb.AppendAck() + ack.ParseFromString(frame_body) + yield append_ack_from_proto(ack) diff --git a/src/s2_sdk/_s2s/_compression.py b/src/s2_sdk/_s2s/_compression.py new file mode 100644 index 0000000..afc2dbb --- /dev/null +++ b/src/s2_sdk/_s2s/_compression.py @@ -0,0 +1,32 @@ +"""S2S message-level compression (zstd and gzip).""" + +import gzip + +import zstandard + +_zstd_compressor = zstandard.ZstdCompressor() +_zstd_decompressor = zstandard.ZstdDecompressor() + +COMPRESSION_NONE = 0 +COMPRESSION_ZSTD = 1 +COMPRESSION_GZIP = 2 + + +def compress(data: bytes, compression: int) -> bytes: + match compression: + case 1: # zstd + return _zstd_compressor.compress(data) + case 2: # gzip + return gzip.compress(data) + case _: + return data + + +def decompress(data: bytes, compression: int) -> bytes: + match compression: + case 1: # zstd + return 
_zstd_decompressor.decompress(data) + case 2: # gzip + return gzip.decompress(data) + case _: + return data diff --git a/src/s2_sdk/_s2s/_protocol.py b/src/s2_sdk/_s2s/_protocol.py new file mode 100644 index 0000000..855050a --- /dev/null +++ b/src/s2_sdk/_s2s/_protocol.py @@ -0,0 +1,170 @@ +"""S2S binary framing protocol. + +Frame layout: [3 bytes: length] [1 byte: flags] [N bytes: body] +Flags byte: [T][CC][RRRRR] + T = terminal (1 = last message) + CC = compression (00=none, 01=zstd, 10=gzip) + R = reserved +Terminal body: [2 bytes: status code big-endian] [JSON error] +""" + +import json +import struct + +from s2_sdk._exceptions import S2SessionError +from s2_sdk._s2s._compression import compress, decompress + +# Flag bits +_TERMINAL_BIT = 0x80 +_COMPRESSION_MASK = 0x60 +_COMPRESSION_SHIFT = 5 + +# Compression codes +COMPRESSION_NONE = 0 +COMPRESSION_ZSTD = 1 +COMPRESSION_GZIP = 2 + +# Compression threshold (1 KiB) +COMPRESSION_THRESHOLD = 1024 + + +def encode_frame( + body: bytes, + terminal: bool = False, + compression: int = COMPRESSION_NONE, +) -> bytes: + """Encode a body into an S2S frame.""" + if not terminal and compression != COMPRESSION_NONE: + body = compress(body, compression) + + length = len(body) + flags = 0 + if terminal: + flags |= _TERMINAL_BIT + flags |= (compression & 0x3) << _COMPRESSION_SHIFT + + return struct.pack(">I", length)[1:] + bytes([flags]) + body + + +def decode_frame(data: bytes) -> tuple[bytes, bool, int]: + """Decode an S2S frame. 
Returns (body, is_terminal, compression).""" + if len(data) < 4: + raise ValueError("Frame too short") + + length = int.from_bytes(data[0:3], "big") + flags = data[3] + + terminal = bool(flags & _TERMINAL_BIT) + compression = (flags & _COMPRESSION_MASK) >> _COMPRESSION_SHIFT + + body = data[4 : 4 + length] + if len(body) < length: + raise ValueError("Incomplete frame body") + + if not terminal and compression != COMPRESSION_NONE: + body = decompress(body, compression) + + return body, terminal, compression + + +async def read_frame(aiter_bytes) -> tuple[bytes, bool]: + """Read a single frame from an async byte iterator. + + Returns (body, is_terminal). + Raises S2SessionError on terminal frames. + """ + # Read header (4 bytes: 3 length + 1 flags) + header = b"" + async for chunk in aiter_bytes: + header += chunk + if len(header) >= 4: + break + + if len(header) < 4: + raise S2SessionError("Connection closed unexpectedly", status_code=0) + + length = int.from_bytes(header[0:3], "big") + flags = header[3] + + terminal = bool(flags & _TERMINAL_BIT) + compression = (flags & _COMPRESSION_MASK) >> _COMPRESSION_SHIFT + + # Read body + remaining = header[4:] + body = remaining + while len(body) < length: + async for chunk in aiter_bytes: + body += chunk + if len(body) >= length: + break + else: + break + + body = body[:length] + + if terminal: + _handle_terminal(body) + + if compression != COMPRESSION_NONE: + body = decompress(body, compression) + + return body, terminal + + +async def read_frames(aiter_bytes): + """Read frames from an async byte stream. + + Yields decoded message bodies. Stops on terminal frame. 
+ """ + buf = b"" + async for chunk in aiter_bytes: + buf += chunk + while len(buf) >= 4: + length = int.from_bytes(buf[0:3], "big") + total = 4 + length + if len(buf) < total: + break + + flags = buf[3] + terminal = bool(flags & _TERMINAL_BIT) + compression = (flags & _COMPRESSION_MASK) >> _COMPRESSION_SHIFT + body = buf[4:total] + buf = buf[total:] + + if terminal: + _handle_terminal(body) + return + + if compression != COMPRESSION_NONE: + body = decompress(body, compression) + + yield body + + +def _handle_terminal(body: bytes) -> None: + """Handle a terminal frame body.""" + if len(body) >= 2: + status_code = int.from_bytes(body[0:2], "big") + error_json = body[2:] + if error_json: + try: + error = json.loads(error_json) + message = error.get("message", str(error)) + except (json.JSONDecodeError, AttributeError): + message = error_json.decode("utf-8", errors="replace") + else: + message = f"Session terminated with status {status_code}" + raise S2SessionError(message, status_code=status_code) + raise S2SessionError("Session terminated", status_code=0) + + +def maybe_compress( + body: bytes, compression: int = COMPRESSION_ZSTD +) -> tuple[bytes, int]: + """Optionally compress a body if it exceeds the threshold. + + Returns (body, compression_code_used). 
+ """ + if len(body) >= COMPRESSION_THRESHOLD and compression != COMPRESSION_NONE: + return compress(body, compression), compression + return body, COMPRESSION_NONE diff --git a/src/s2_sdk/_s2s/_read_session.py b/src/s2_sdk/_s2s/_read_session.py new file mode 100644 index 0000000..ba1a151 --- /dev/null +++ b/src/s2_sdk/_s2s/_read_session.py @@ -0,0 +1,124 @@ +"""Unidirectional read session over HTTP/2 with S2S/proto framing.""" + +import asyncio +from typing import Any, AsyncIterable + +import s2_sdk._generated.s2.v1.s2_pb2 as pb +from s2_sdk._client import HttpClient +from s2_sdk._exceptions import S2SessionError +from s2_sdk._mappers import read_batch_from_proto, read_limit_params, read_start_params +from s2_sdk._retrier import compute_backoffs, http_retry_on +from s2_sdk._s2s._protocol import read_frames +from s2_sdk.types import ReadLimit, SeqNum, SequencedRecord, Tail, TailOffset, Timestamp +from s2_sdk.utils import metered_bytes + + +async def run_read_session( + client: HttpClient, + stream_name: str, + start: SeqNum | Timestamp | TailOffset, + limit: ReadLimit | None, + until: int | None, + clamp: bool, + ignore_command_records: bool, + max_retries: int, +) -> AsyncIterable[list[SequencedRecord] | Tail]: + """Run a read session with retry and position advancement.""" + params = _build_read_params(start, limit, until, clamp) + backoffs = compute_backoffs(max_retries) + attempt = 0 + + # Track remaining limit for retry advancement + remaining_count = limit.count if limit and limit.count is not None else None + remaining_bytes = limit.bytes if limit and limit.bytes is not None else None + + while True: + try: + async with client.stream( + "GET", + f"/streams/{stream_name}/records", + params=params, + headers={"accept": "s2s/proto"}, + ) as response: + if response.status_code == 416: + # Tail response + body = await response.aread() + import json + + tail_data = json.loads(body) + tail_pos = tail_data.get("tail", {}) + yield Tail( + 
next_seq_num=tail_pos.get("seq_num", 0), + last_timestamp=tail_pos.get("timestamp", 0), + ) + return + + if response.status_code != 200: + body = await response.aread() + raise S2SessionError( + body.decode("utf-8", errors="replace"), + status_code=response.status_code, + ) + + async for frame_body in read_frames(response.aiter_bytes()): + if attempt > 0: + attempt = 0 + + batch = pb.ReadBatch() + batch.ParseFromString(frame_body) + records, tail = read_batch_from_proto(batch, ignore_command_records) + + if tail is not None: + if records: + yield records + yield Tail( + next_seq_num=tail.next_seq_num, + last_timestamp=tail.last_timestamp, + ) + return + + if not records: + # Empty heartbeat batch — skip + continue + + # Advance position for retry + last_record = records[-1] + params["seq_num"] = last_record.seq_num + 1 + # Remove other start params since we now use seq_num + params.pop("timestamp", None) + params.pop("tail_offset", None) + + if remaining_count is not None: + remaining_count = max(remaining_count - len(records), 0) + params["count"] = remaining_count + if remaining_bytes is not None: + remaining_bytes = max( + remaining_bytes - metered_bytes(records), 0 + ) + params["bytes"] = remaining_bytes + + yield records + + return + except Exception as e: + if attempt < max_retries and http_retry_on(e): + await asyncio.sleep(backoffs[attempt]) + attempt += 1 + else: + raise e + + +def _build_read_params( + start: SeqNum | Timestamp | TailOffset, + limit: ReadLimit | None, + until: int | None, + clamp: bool, +) -> dict[str, Any]: + params: dict[str, Any] = {} + params.update(read_start_params(start)) + params.update(read_limit_params(limit)) + if until is not None: + params["until"] = until + if clamp: + params["clamp"] = "true" + return params diff --git a/src/s2_sdk/_validators.py b/src/s2_sdk/_validators.py new file mode 100644 index 0000000..b651f9d --- /dev/null +++ b/src/s2_sdk/_validators.py @@ -0,0 +1,28 @@ +import re + +from s2_sdk.types import 
AppendInput +from s2_sdk.utils import metered_bytes + +_BASIN_NAME_REGEX = re.compile(r"^[a-z0-9]([a-z0-9-]*[a-z0-9])?$") + +ONE_MIB = 1024 * 1024 + + +def validate_basin(name: str) -> None: + if ( + isinstance(name, str) + and (8 <= len(name) <= 48) + and _BASIN_NAME_REGEX.match(name) + ): + return + raise ValueError(f"Invalid basin name: {name}") + + +def validate_append_input(input: AppendInput) -> None: + num_bytes = metered_bytes(input.records) + num_records = len(input.records) + if 1 <= num_records <= 1000 and num_bytes <= ONE_MIB: + return + raise ValueError( + f"Invalid append input: num_records={num_records}, metered_bytes={num_bytes}" + ) diff --git a/src/s2_sdk/py.typed b/src/s2_sdk/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/s2_sdk/types.py b/src/s2_sdk/types.py new file mode 100644 index 0000000..e781c5d --- /dev/null +++ b/src/s2_sdk/types.py @@ -0,0 +1,365 @@ +__all__ = [ + "Record", + "AppendInput", + "AppendOutput", + "StreamPosition", + "Tail", + "SeqNum", + "Timestamp", + "TailOffset", + "ReadLimit", + "SequencedRecord", + "Page", + "BasinScope", + "BasinState", + "BasinInfo", + "StreamInfo", + "StorageClass", + "TimestampingMode", + "Timestamping", + "StreamConfig", + "BasinConfig", + "ExactMatch", + "PrefixMatch", + "Permission", + "OperationGroupPermissions", + "Operation", + "AccessTokenScope", + "AccessTokenInfo", + "Scalar", + "Accumulation", + "Gauge", + "Label", + "MetricUnit", + "TimeseriesInterval", + "AccountMetricSet", + "BasinMetricSet", + "StreamMetricSet", +] + +from dataclasses import dataclass, field +from datetime import datetime +from enum import StrEnum +from typing import Generic, Literal, TypeVar + +T = TypeVar("T") + +ONE_MIB = 1024 * 1024 + + +# --- Core data types --- + + +@dataclass(slots=True) +class Record: + """Record to be appended to a stream.""" + + body: bytes + headers: list[tuple[bytes, bytes]] = field(default_factory=list) + timestamp: int | None = None + + +@dataclass(slots=True) 
+class AppendInput: + """Batch of records for append operations.""" + + records: list[Record] + match_seq_num: int | None = None + fencing_token: str | None = None + + +@dataclass(slots=True) +class StreamPosition: + """Position of a record in a stream.""" + + seq_num: int + timestamp: int + + +@dataclass(slots=True) +class AppendOutput: + """Result from an append operation.""" + + start: StreamPosition + end: StreamPosition + tail: StreamPosition + + +@dataclass(slots=True) +class Tail: + """Tail of a stream.""" + + next_seq_num: int + last_timestamp: int + + +@dataclass(slots=True) +class ReadLimit: + """Limits for read operations.""" + + count: int | None = None + bytes: int | None = None + + +@dataclass(slots=True) +class SequencedRecord: + """Record read from a stream.""" + + seq_num: int + body: bytes + headers: list[tuple[bytes, bytes]] + timestamp: int + + +# --- Start position types --- + + +@dataclass(slots=True) +class SeqNum: + value: int + + +@dataclass(slots=True) +class Timestamp: + value: int + + +@dataclass(slots=True) +class TailOffset: + """Number of records before the tail.""" + + value: int + + +# --- Pagination --- + + +@dataclass(slots=True) +class Page(Generic[T]): + """Page of items.""" + + items: list[T] + has_more: bool + + +# --- String-based enums (v1 API) --- + + +class StorageClass(StrEnum): + STANDARD = "standard" + EXPRESS = "express" + + +class TimestampingMode(StrEnum): + CLIENT_PREFER = "client-prefer" + CLIENT_REQUIRE = "client-require" + ARRIVAL = "arrival" + + +class BasinScope(StrEnum): + AWS_US_EAST_1 = "aws:us-east-1" + + +class BasinState(StrEnum): + ACTIVE = "active" + CREATING = "creating" + DELETING = "deleting" + + +class Operation(StrEnum): + LIST_BASINS = "list-basins" + CREATE_BASIN = "create-basin" + DELETE_BASIN = "delete-basin" + RECONFIGURE_BASIN = "reconfigure-basin" + GET_BASIN_CONFIG = "get-basin-config" + ISSUE_ACCESS_TOKEN = "issue-access-token" + REVOKE_ACCESS_TOKEN = "revoke-access-token" + 
LIST_ACCESS_TOKENS = "list-access-tokens" + LIST_STREAMS = "list-streams" + CREATE_STREAM = "create-stream" + DELETE_STREAM = "delete-stream" + GET_STREAM_CONFIG = "get-stream-config" + RECONFIGURE_STREAM = "reconfigure-stream" + CHECK_TAIL = "check-tail" + APPEND = "append" + READ = "read" + TRIM = "trim" + FENCE = "fence" + ACCOUNT_METRICS = "account-metrics" + BASIN_METRICS = "basin-metrics" + STREAM_METRICS = "stream-metrics" + + +class Permission(StrEnum): + READ = "read" + WRITE = "write" + READ_WRITE = "read-write" + + +class MetricUnit(StrEnum): + BYTES = "bytes" + OPERATIONS = "operations" + + +class TimeseriesInterval(StrEnum): + MINUTE = "minute" + HOUR = "hour" + DAY = "day" + + +class AccountMetricSet(StrEnum): + ACTIVE_BASINS = "active-basins" + ACCOUNT_OPS = "account-ops" + + +class BasinMetricSet(StrEnum): + STORAGE = "storage" + APPEND_OPS = "append-ops" + READ_OPS = "read-ops" + READ_THROUGHPUT = "read-throughput" + APPEND_THROUGHPUT = "append-throughput" + BASIN_OPS = "basin-ops" + + +class StreamMetricSet(StrEnum): + STORAGE = "storage" + + +# --- Configuration types --- + + +@dataclass(slots=True) +class Timestamping: + """Timestamping behavior.""" + + mode: TimestampingMode | None = None + uncapped: bool | None = None + + +@dataclass(slots=True) +class StreamConfig: + """Stream configuration.""" + + storage_class: StorageClass | None = None + retention_policy: int | Literal["infinite"] | None = None + timestamping: Timestamping | None = None + delete_on_empty_min_age: int | None = None + + +@dataclass(slots=True) +class BasinConfig: + """Basin configuration.""" + + default_stream_config: StreamConfig | None = None + create_stream_on_append: bool | None = None + create_stream_on_read: bool | None = None + + +# --- Info types --- + + +@dataclass(slots=True) +class BasinInfo: + """Basin information.""" + + name: str + scope: BasinScope + state: BasinState + + +@dataclass(slots=True) +class StreamInfo: + """Stream information.""" + + name: str + 
created_at: datetime + deleted_at: datetime | None + + +# --- Resource matching (v1 oneOf pattern) --- + + +@dataclass(slots=True) +class ExactMatch: + """Match only the resource with this exact name.""" + + value: str + + +@dataclass(slots=True) +class PrefixMatch: + """Match all resources that start with this prefix.""" + + value: str + + +# --- Access control --- + + +@dataclass(slots=True) +class OperationGroupPermissions: + """Operation group permissions.""" + + account: Permission | None = None + basin: Permission | None = None + stream: Permission | None = None + + +@dataclass(slots=True) +class AccessTokenScope: + """Access token scope.""" + + basins: ExactMatch | PrefixMatch | None = None + streams: ExactMatch | PrefixMatch | None = None + access_tokens: ExactMatch | PrefixMatch | None = None + op_groups: OperationGroupPermissions | None = None + ops: list[Operation] = field(default_factory=list) + + +@dataclass(slots=True) +class AccessTokenInfo: + """Access token information.""" + + id: str + scope: AccessTokenScope + expires_at: str | None + auto_prefix_streams: bool + + +# --- Metrics types --- + + +@dataclass(slots=True) +class Scalar: + """Single named metric value.""" + + name: str + unit: MetricUnit + value: float + + +@dataclass(slots=True) +class Accumulation: + """Timeseries of accumulated values over buckets.""" + + name: str + unit: MetricUnit + bucket_length: TimeseriesInterval + values: list[tuple[int, float]] + + +@dataclass(slots=True) +class Gauge: + """Timeseries of instantaneous values.""" + + name: str + unit: MetricUnit + values: list[tuple[int, float]] + + +@dataclass(slots=True) +class Label: + """Set of string labels.""" + + name: str + values: list[str] diff --git a/src/streamstore/utils.py b/src/s2_sdk/utils.py similarity index 75% rename from src/streamstore/utils.py rename to src/s2_sdk/utils.py index d8f1978..131bb57 100644 --- a/src/streamstore/utils.py +++ b/src/s2_sdk/utils.py @@ -5,26 +5,18 @@ from datetime import 
datetime, timedelta from typing import AsyncIterable, Iterable, Self -from streamstore.schemas import ONE_MIB, AppendInput, Record, SequencedRecord +from s2_sdk.types import ONE_MIB, AppendInput, Record, SequencedRecord class CommandRecord: - """ - Factory class for creating `command records `_. - """ + """Factory class for creating command records.""" FENCE = b"fence" TRIM = b"trim" @staticmethod def fence(token: str) -> Record: - """ - Create a fence command record. - - Args: - token: Fencing token. Its UTF-8 byte count must not exceed 36 bytes. If empty, clears - the previously set token. - """ + """Create a fence command record.""" encoded_token = token.encode() if len(encoded_token) > 36: raise ValueError("UTF-8 byte count of fencing token exceeds 36 bytes") @@ -32,17 +24,7 @@ def fence(token: str) -> Record: @staticmethod def trim(desired_first_seq_num: int) -> Record: - """ - Create a trim command record. - - Args: - desired_first_seq_num: Sequence number for the first record to exist after trimming - preceeding records in the stream. - - Note: - If ``desired_first_seq_num`` was smaller than the sequence number for the first existing - record in the stream, trimming doesn't happen. - """ + """Create a trim command record.""" return Record( body=desired_first_seq_num.to_bytes(8), headers=[(bytes(), CommandRecord.TRIM)], @@ -50,16 +32,7 @@ def trim(desired_first_seq_num: int) -> Record: def metered_bytes(records: Iterable[Record | SequencedRecord]) -> int: - """ - Each record is metered using the following formula: - - .. 
code-block:: python - - 8 + 2 * len(headers) - + sum((len(name) + len(value)) for (name, value) in headers) - + len(body) - - """ + """Calculate metered bytes for a batch of records.""" return sum( ( 8 @@ -192,27 +165,7 @@ async def append_inputs_gen( max_bytes_per_batch: int = ONE_MIB, max_linger_per_batch: timedelta | None = None, ) -> AsyncIterable[AppendInput]: - """ - Generator function for batching records and yielding :class:`.AppendInput`. - - Returned generator object can be used as the parameter to :meth:`.Stream.append_session`. - - Yields: - :class:`.AppendInput` - - Args: - records: Records that have to be appended to a stream. - match_seq_num: If it is not ``None``, it is used in the first yield of :class:`.AppendInput` - and is automatically advanced for subsequent yields. - fencing_token: Used in each yield of :class:`.AppendInput`. - max_records_per_batch: Maximum number of records in each batch. - max_bytes_per_batch: Maximum size of each batch calculated using :func:`.metered_bytes`. - max_linger_per_batch: Maximum duration for each batch to accumulate records before yielding. - - Note: - If ``max_linger_per_batch`` is ``None``, :class:`.AppendInput` will be yielded only - when ``max_records_per_batch`` or ``max_bytes_per_batch`` is reached. 
- """ + """Generator for batching records into AppendInput objects.""" append_input_queue: Queue[AppendInput | None] = Queue() append_input_aiter = _AppendInputAsyncIterator(append_input_queue) batcher = _AutoBatcher( diff --git a/src/streamstore/__init__.py b/src/streamstore/__init__.py deleted file mode 100644 index 5c21e12..0000000 --- a/src/streamstore/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -__all__ = [ - "S2", - "Basin", - "Stream", - "S2Error", - "streamstore.schemas", - "streamstore.utils", -] - -from streamstore._client import S2, Basin, Stream -from streamstore._exceptions import S2Error diff --git a/src/streamstore/_client.py b/src/streamstore/_client.py deleted file mode 100644 index 0fdf561..0000000 --- a/src/streamstore/_client.py +++ /dev/null @@ -1,1004 +0,0 @@ -import asyncio -import re -import uuid -from collections import deque -from dataclasses import dataclass -from datetime import timedelta -from typing import AsyncIterable, Self, cast - -from anyio import create_memory_object_stream, create_task_group -from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream -from google.protobuf.field_mask_pb2 import FieldMask -from grpc import Compression, StatusCode, ssl_channel_credentials -from grpc.aio import AioRpcError, Channel, secure_channel - -from streamstore import schemas -from streamstore._exceptions import fallible -from streamstore._lib.s2.v1alpha.s2_pb2 import ( - AppendRequest, - AppendSessionRequest, - BasinConfig, - CheckTailRequest, - CreateBasinRequest, - CreateStreamRequest, - DeleteBasinRequest, - DeleteStreamRequest, - GetBasinConfigRequest, - GetStreamConfigRequest, - IssueAccessTokenRequest, - ListAccessTokensRequest, - ListBasinsRequest, - ListStreamsRequest, - ReadSessionRequest, - ReconfigureBasinRequest, - ReconfigureStreamRequest, - RevokeAccessTokenRequest, - StreamConfig, -) -from streamstore._lib.s2.v1alpha.s2_pb2_grpc import ( - AccountServiceStub, - BasinServiceStub, - StreamServiceStub, -) 
-from streamstore._mappers import ( - access_token_info_message, - access_token_info_schema, - append_input_message, - append_output_schema, - basin_config_message, - basin_config_schema, - basin_info_schema, - read_request_message, - read_session_request_message, - sequenced_records_schema, - stream_config_message, - stream_config_schema, - stream_info_schema, -) -from streamstore._retrier import Attempt, Retrier, compute_backoffs -from streamstore.utils import metered_bytes - -_BASIN_NAME_REGEX = re.compile(r"^[a-z0-9]([a-z0-9-]*[a-z0-9])?$") -_MEMORY_STREAM_MAX_BUF_SIZE = 100 - - -def _grpc_retry_on(e: Exception) -> bool: - if isinstance(e, AioRpcError) and e.code() in ( - StatusCode.DEADLINE_EXCEEDED, - StatusCode.UNAVAILABLE, - StatusCode.UNKNOWN, - ): - return True - return False - - -def _validate_basin(name: str) -> None: - if ( - isinstance(name, str) - and (8 <= len(name) <= 48) - and _BASIN_NAME_REGEX.match(name) - ): - return - raise ValueError(f"Invalid basin name: {name}") - - -def _s2_request_token() -> str: - return uuid.uuid4().hex - - -def _validate_append_input(input: schemas.AppendInput) -> None: - num_bytes = metered_bytes(input.records) - num_records = len(input.records) - if 1 <= num_records <= 1000 and num_bytes <= schemas.ONE_MIB: - return - raise ValueError( - f"Invalid append input: num_records={num_records}, metered_bytes={num_bytes}" - ) - - -async def _pipe_append_inputs( - inputs: AsyncIterable[schemas.AppendInput], - input_tx: MemoryObjectSendStream[schemas.AppendInput], -): - async with input_tx: - async for input in inputs: - _validate_append_input(input) - await input_tx.send(input) - - -async def _append_session_request_aiter( - stream: str, - inputs: AsyncIterable[schemas.AppendInput], -) -> AsyncIterable[AppendSessionRequest]: - async for input in inputs: - _validate_append_input(input) - yield AppendSessionRequest(input=append_input_message(stream, input)) - - -def _prepare_read_session_request_for_retry( - request: 
ReadSessionRequest, last_read_batch: list[schemas.SequencedRecord] -) -> None: - if len(last_read_batch) > 0: - request.seq_num = last_read_batch[-1].seq_num + 1 - if request.limit.count is not None and request.limit.count != 0: - request.limit.count = max(request.limit.count - len(last_read_batch), 0) - if request.limit.bytes is not None and request.limit.bytes != 0: - request.limit.bytes = max( - request.limit.bytes - metered_bytes(last_read_batch), - 0, - ) - - -@dataclass(slots=True) -class _RpcConfig: - timeout: float - metadata: list[tuple[str, str]] - compression: Compression - - -@dataclass(slots=True) -class _Config: - max_retries: int - enable_append_retries: bool - rpc: _RpcConfig - - -class S2: - """ - Async client for interacting with `s2.dev `_. - - Args: - access_token: Access token generated from `S2 dashboard `_. - endpoints: S2 endpoints. If not specified, public endpoints for S2 service running in AWS cloud will be used. - request_timeout: Timeout for requests made by the client. Default value is ``5`` seconds. - max_retries: Maximum number of retries for a request. Default value is ``3``. - enable_append_retries: Enable retries for appends i.e for both :meth:`.Stream.append` and - :meth:`.Stream.append_session`. Default value is ``True``. - enable_compression: Enable compression (Gzip) for :meth:`.Stream.append`, :meth:`.Stream.append_session`, - :meth:`.Stream.read`, and :meth:`.Stream.read_session`. Default value is ``False``. 
- """ - - __slots__ = ( - "_endpoints", - "_account_channel", - "_basin_channels", - "_config", - "_stub", - "_retrier", - ) - - @fallible - def __init__( - self, - access_token: str, - endpoints: schemas.Endpoints | None = None, - request_timeout: timedelta = timedelta(seconds=5.0), - max_retries: int = 3, - enable_append_retries: bool = True, - enable_compression: bool = False, - ) -> None: - self._endpoints = ( - endpoints - if endpoints is not None - else schemas.Endpoints.for_cloud(schemas.Cloud.AWS) - ) - self._account_channel = secure_channel( - target=self._endpoints._account(), - credentials=ssl_channel_credentials(), - ) - self._basin_channels: dict[str, Channel] = {} - self._config = _Config( - max_retries=max_retries, - enable_append_retries=enable_append_retries, - rpc=_RpcConfig( - timeout=request_timeout.total_seconds(), - metadata=[("authorization", f"Bearer {access_token}")], - compression=Compression.Gzip - if enable_compression - else Compression.NoCompression, - ), - ) - self._stub = AccountServiceStub(self._account_channel) - self._retrier = Retrier( - should_retry_on=_grpc_retry_on, - max_attempts=max_retries, - ) - - async def __aenter__(self) -> Self: - return self - - async def __aexit__(self, exc_type, exc_value, traceback) -> bool: - await self.close() - if exc_type is None and exc_value is None and traceback is None: - return True - return False - - def __getitem__(self, name: str) -> "Basin": - return self.basin(name) - - async def close(self) -> None: - """ - Close all open connections to S2 service endpoints. - - Tip: - ``S2`` supports async context manager protocol, so you can also do the following instead of - explicitly closing: - - .. code-block:: python - - async with S2(..) as s2: - .. 
- - """ - - await self._account_channel.close(None) - for basin_channel in self._basin_channels.values(): - await basin_channel.close(None) - - @fallible - async def create_basin( - self, - name: str, - config: schemas.BasinConfig | None = None, - ) -> schemas.BasinInfo: - """ - Create a basin. - - Args: - name: Name of the basin. - config: Configuration for the basin. - - Note: - ``name`` must be globally unique and must be between 8 and 48 characters, comprising lowercase - letters, numbers and hyphens. It cannot begin or end with a hyphen. - """ - _validate_basin(name) - request = CreateBasinRequest( - basin=name, - config=cast( - BasinConfig, - basin_config_message(config), - ), - ) - metadata = self._config.rpc.metadata + [ - ("s2-request-token", _s2_request_token()) - ] - response = await self._retrier( - self._stub.CreateBasin, - request, - timeout=self._config.rpc.timeout, - metadata=metadata, - ) - return basin_info_schema(response.info) - - def basin(self, name: str) -> "Basin": - """ - Get a Basin object that can be used for performing basin operations. - - Args: - name: Name of the basin. - - Note: - The basin must have been created already, else the operations will fail. - - Tip: - .. code-block:: python - - async with S2(..) as s2: - basin = s2.basin("your-basin-name") - - :class:`.S2` implements the ``getitem`` magic method, so you can also do the following instead: - - .. code-block:: python - - async with S2(..) as s2: - basin = s2["your-basin-name"] - """ - _validate_basin(name) - if name not in self._basin_channels: - self._basin_channels[name] = secure_channel( - target=self._endpoints._basin(name), - credentials=ssl_channel_credentials(), - ) - return Basin(name, self._basin_channels[name], self._config) - - @fallible - async def list_basins( - self, - prefix: str = "", - start_after: str = "", - limit: int = 1000, - ) -> schemas.Page[schemas.BasinInfo]: - """ - List basins. 
- - Args: - prefix: Filter to basins whose name begins with this prefix. - start_after: Filter to basins whose name starts lexicographically after this value. - limit: Number of items to return per page, up to a maximum of 1000. - """ - request = ListBasinsRequest(prefix=prefix, start_after=start_after, limit=limit) - response = await self._retrier( - self._stub.ListBasins, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return schemas.Page( - items=[basin_info_schema(b) for b in response.basins], - has_more=response.has_more, - ) - - @fallible - async def delete_basin(self, name: str) -> None: - """ - Delete a basin. - - Args: - name: Name of the basin. - - Note: - Basin deletion is asynchronous, and may take a few minutes to complete. - """ - request = DeleteBasinRequest(basin=name) - await self._retrier( - self._stub.DeleteBasin, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - - @fallible - async def get_basin_config(self, name: str) -> schemas.BasinConfig: - """ - Get the current configuration of a basin. - - Args: - name: Name of the basin. - """ - request = GetBasinConfigRequest(basin=name) - response = await self._retrier( - self._stub.GetBasinConfig, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return basin_config_schema(response.config) - - @fallible - async def reconfigure_basin( - self, - name: str, - config: schemas.BasinConfig, - ) -> schemas.BasinConfig: - """ - Modify the configuration of a basin. - - Args: - name: Name of the basin. - config: Configuration for the basin. - - Note: - Modifiying the :attr:`.BasinConfig.default_stream_config` doesn't affect already - existing streams; it only applies to new streams created hereafter. 
- """ - basin_config, mask_paths = cast( - tuple[BasinConfig, list[str]], - basin_config_message( - config, - return_mask_paths=True, - ), - ) - request = ReconfigureBasinRequest( - basin=name, config=basin_config, mask=FieldMask(paths=mask_paths) - ) - response = await self._retrier( - self._stub.ReconfigureBasin, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return basin_config_schema(response.config) - - @fallible - async def issue_access_token( - self, - id: str, - scope: schemas.AccessTokenScope, - expires_at: int | None = None, - auto_prefix_streams: bool = False, - ) -> str: - """ - Issue a new access token. - - Args: - id: Access token ID. - scope: Access token scope. - expires_at: Expiration time in seconds since Unix epoch. If not specified, expiration - time of ``access_token`` passed to :class:`.S2` will be used. - auto_prefix_streams: Enable auto-prefixing: the specified prefix in - :attr:`.AccessTokenScope.streams` will be added to stream names in requests and stripped - from stream names in responses. - - Note: - ``id`` must be unique to the account and between 1 and 96 bytes in length. - """ - request = IssueAccessTokenRequest( - info=access_token_info_message(id, scope, auto_prefix_streams, expires_at) - ) - response = await self._retrier( - self._stub.IssueAccessToken, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return response.access_token - - @fallible - async def list_access_tokens( - self, prefix: str = "", start_after: str = "", limit: int = 1000 - ) -> schemas.Page[schemas.AccessTokenInfo]: - """ - List access tokens. - - Args: - prefix: Filter to access tokens whose ID begins with this prefix. - start_after: Filter to access tokens whose ID starts lexicographically after this value. - limit: Number of items to return per page, up to a maximum of 1000. 
- """ - request = ListAccessTokensRequest( - prefix=prefix, start_after=start_after, limit=limit - ) - response = await self._retrier( - self._stub.ListAccessTokens, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return schemas.Page( - items=[access_token_info_schema(info) for info in response.access_tokens], - has_more=response.has_more, - ) - - @fallible - async def revoke_access_token(self, id: str) -> schemas.AccessTokenInfo: - """ - Revoke an access token. - - Args: - id: Access token ID. - """ - request = RevokeAccessTokenRequest(id=id) - response = await self._retrier( - self._stub.RevokeAccessToken, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return access_token_info_schema(response.info) - - -class Basin: - """ - Caution: - Returned by :meth:`.S2.basin`. Do not instantiate directly. - """ - - __slots__ = ( - "_channel", - "_config", - "_retrier", - "_stub", - "_name", - ) - - @fallible - def __init__( - self, - name: str, - channel: Channel, - config: _Config, - ) -> None: - self._channel = channel - self._config = config - self._retrier = Retrier( - should_retry_on=_grpc_retry_on, - max_attempts=config.max_retries, - ) - self._stub = BasinServiceStub(self._channel) - self._name = name - - def __repr__(self) -> str: - return f"Basin(name={self.name})" - - def __getitem__(self, name: str) -> "Stream": - return self.stream(name) - - @property - def name(self) -> str: - """Basin name.""" - return self._name - - @fallible - async def create_stream( - self, - name: str, - config: schemas.StreamConfig | None = None, - ) -> schemas.StreamInfo: - """ - Create a stream. - - Args: - name: Name of the stream. - config: Configuration for the stream. - - Note: - ``name`` must be unique within the basin. It can be an arbitrary string upto 512 characters. - Backslash (``/``) is recommended as a delimiter for hierarchical naming. 
- """ - request = CreateStreamRequest( - stream=name, - config=cast( - StreamConfig, - stream_config_message(config), - ), - ) - metadata = self._config.rpc.metadata + [ - ("s2-request-token", _s2_request_token()) - ] - response = await self._retrier( - self._stub.CreateStream, - request, - timeout=self._config.rpc.timeout, - metadata=metadata, - ) - return stream_info_schema(response.info) - - def stream(self, name: str) -> "Stream": - """ - Get a Stream object that can be used for performing stream operations. - - Args: - name: Name of the stream. - - Note: - The stream must have been created already, else the operations will fail. - - Tip: - .. code-block:: python - - async with S2(..) as s2: - stream = s2.basin("your-basin-name").stream("your-stream-name") - - :class:`.Basin` implements the ``getitem`` magic method, so you can also do the following instead: - - .. code-block:: python - - async with S2(..) as s2: - stream = s2["your-basin-name"]["your-stream-name"] - - """ - return Stream(name, self._channel, self._config) - - @fallible - async def list_streams( - self, - prefix: str = "", - start_after: str = "", - limit: int = 1000, - ) -> schemas.Page[schemas.StreamInfo]: - """ - List streams. - - Args: - prefix: Filter to streams whose name begins with this prefix. - start_after: Filter to streams whose name starts lexicographically after this value. - limit: Number of items to return per page, up to a maximum of 1000. - """ - request = ListStreamsRequest( - prefix=prefix, start_after=start_after, limit=limit - ) - response = await self._retrier( - self._stub.ListStreams, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return schemas.Page( - items=[stream_info_schema(s) for s in response.streams], - has_more=response.has_more, - ) - - @fallible - async def delete_stream(self, name: str) -> None: - """ - Delete a stream. - - Args: - name: Name of the stream. 
- - Note: - Stream deletion is asynchronous, and may take a few minutes to complete. - """ - request = DeleteStreamRequest(stream=name) - await self._retrier( - self._stub.DeleteStream, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - - @fallible - async def get_stream_config(self, name: str) -> schemas.StreamConfig: - """ - Get the current configuration of a stream. - - Args: - name: Name of the stream. - """ - request = GetStreamConfigRequest(stream=name) - response = await self._retrier( - self._stub.GetStreamConfig, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return stream_config_schema(response.config) - - @fallible - async def reconfigure_stream( - self, - name: str, - config: schemas.StreamConfig, - ) -> schemas.StreamConfig: - """ - Modify the configuration of a stream. - - Args: - name: Name of the stream. - config: Configuration for the stream. - - Note: - Modifying :attr:`.StreamConfig.storage_class` will take effect only when this stream has - been inactive for 10 minutes. This will become a live migration in future. - """ - stream_config, mask_paths = cast( - tuple[StreamConfig, list[str]], - stream_config_message(config, return_mask_paths=True), - ) - request = ReconfigureStreamRequest( - stream=name, config=stream_config, mask=FieldMask(paths=mask_paths) - ) - response = await self._retrier( - self._stub.ReconfigureStream, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return stream_config_schema(response.config) - - -class Stream: - """ - Caution: - Returned by :meth:`.Basin.stream`. Do not instantiate directly. 
- """ - - __slots__ = ( - "_name", - "_config", - "_retrier", - "_stub", - ) - - def __init__(self, name: str, channel: Channel, config: _Config) -> None: - self._name = name - self._config = config - self._retrier = Retrier( - should_retry_on=_grpc_retry_on, - max_attempts=config.max_retries, - ) - self._stub = StreamServiceStub(channel) - - def __repr__(self) -> str: - return f"Stream(name={self.name})" - - @property - def name(self) -> str: - """Stream name.""" - return self._name - - @fallible - async def check_tail(self) -> schemas.Tail: - """ - Check the tail of a stream. - """ - request = CheckTailRequest(stream=self.name) - response = await self._retrier( - self._stub.CheckTail, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - ) - return schemas.Tail(response.next_seq_num, response.last_timestamp) - - @fallible - async def append(self, input: schemas.AppendInput) -> schemas.AppendOutput: - """ - Append a batch of records to a stream. - """ - _validate_append_input(input) - request = AppendRequest(input=append_input_message(self.name, input)) - response = ( - await self._retrier( - self._stub.Append, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - compression=self._config.rpc.compression, - ) - if self._config.enable_append_retries - else await self._stub.Append( - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - compression=self._config.rpc.compression, - ) - ) - return append_output_schema(response.output) - - async def _append_session( - self, - attempt: Attempt, - inflight_inputs: deque[schemas.AppendInput], - request_rx: MemoryObjectReceiveStream[AppendSessionRequest], - output_tx: MemoryObjectSendStream[schemas.AppendOutput], - ): - async for response in self._stub.AppendSession( - request_rx, - metadata=self._config.rpc.metadata, - compression=self._config.rpc.compression, - ): - if attempt.value > 0: - attempt.value = 0 - output = 
response.output - corresponding_input = inflight_inputs.popleft() - num_records_sent = len(corresponding_input.records) - num_records_ackd = output.end_seq_num - output.start_seq_num - if num_records_sent == num_records_ackd: - await output_tx.send(append_output_schema(response.output)) - else: - raise RuntimeError( - "Number of records sent doesn't match the number of acknowledgements received" - ) - - async def _retrying_append_session_inner( - self, - input_rx: MemoryObjectReceiveStream[schemas.AppendInput], - output_tx: MemoryObjectSendStream[schemas.AppendOutput], - ): - inflight_inputs: deque[schemas.AppendInput] = deque() - max_attempts = self._config.max_retries - backoffs = compute_backoffs(max_attempts) - attempt = Attempt(value=0) - async with output_tx: - while True: - request_tx, request_rx = create_memory_object_stream[ - AppendSessionRequest - ](max_buffer_size=_MEMORY_STREAM_MAX_BUF_SIZE) - try: - async with create_task_group() as tg: - tg.start_soon( - self._append_session, - attempt, - inflight_inputs, - request_rx, - output_tx, - ) - async with request_tx: - if len(inflight_inputs) > 0: - for input in list(inflight_inputs): - await request_tx.send( - AppendSessionRequest( - input=append_input_message(self.name, input) - ) - ) - async for input in input_rx: - inflight_inputs.append(input) - await request_tx.send( - AppendSessionRequest( - input=append_input_message(self.name, input) - ) - ) - return - except* AioRpcError as eg: - if attempt.value < max_attempts and any( - _grpc_retry_on(e) for e in eg.exceptions - ): - await asyncio.sleep(backoffs[attempt.value]) - attempt.value += 1 - else: - raise eg - - async def _retrying_append_session( - self, - inputs: AsyncIterable[schemas.AppendInput], - ) -> AsyncIterable[schemas.AppendOutput]: - input_tx, input_rx = create_memory_object_stream[schemas.AppendInput]( - max_buffer_size=_MEMORY_STREAM_MAX_BUF_SIZE - ) - output_tx, output_rx = create_memory_object_stream[schemas.AppendOutput]( - 
max_buffer_size=_MEMORY_STREAM_MAX_BUF_SIZE - ) - async with create_task_group() as tg: - tg.start_soon( - self._retrying_append_session_inner, - input_rx, - output_tx, - ) - tg.start_soon(_pipe_append_inputs, inputs, input_tx) - async with output_rx: - async for output in output_rx: - yield output - - @fallible - async def append_session( - self, inputs: AsyncIterable[schemas.AppendInput] - ) -> AsyncIterable[schemas.AppendOutput]: - """ - Append batches of records to a stream continuously, while guaranteeing pipelined inputs are - processed in order. - - Tip: - You can use :func:`.append_inputs_gen` for automatic batching of records instead of explicitly - preparing and providing batches of records. - - Yields: - :class:`.AppendOutput` for each corresponding :class:`.AppendInput`. - - Returns: - If ``enable_append_retries=False`` in :class:`.S2`, and if processing any of the - :class:`.AppendInput` fails. - - (or) - - If ``enable_append_retries=True`` in :class:`.S2`, and if retry budget gets exhausted after - trying to recover from failures. - """ - if self._config.enable_append_retries: - async for output in self._retrying_append_session(inputs): - yield output - else: - async for response in self._stub.AppendSession( - _append_session_request_aiter(self.name, inputs), - metadata=self._config.rpc.metadata, - compression=self._config.rpc.compression, - ): - yield append_output_schema(response.output) - - @fallible - async def read( - self, - start: schemas.SeqNum | schemas.Timestamp | schemas.TailOffset, - limit: schemas.ReadLimit | None = None, - until: int | None = None, - ignore_command_records: bool = False, - ) -> list[schemas.SequencedRecord] | schemas.Tail: - """ - Read a batch of records from a stream. - - Args: - start: Inclusive start position. - limit: Number of records to return, up to a maximum of 1000 or 1MiB of :func:`.metered_bytes`. - until: Exclusive timestamp to read until. 
It is applied as an additional constraint on - top of the ``limit`` and guarantees that all returned records have timestamps less - than this timestamp. - ignore_command_records: Filters out command records if present from the batch. - - Returns: - Batch of sequenced records. It can be empty only if ``limit`` and/or ``until`` were provided - and no records satisfy those constraints. - - (or) - - Tail of the stream. It will be returned only if ``start`` equals or exceeds the tail of - the stream. - """ - request = read_request_message(self.name, start, limit, until) - response = await self._retrier( - self._stub.Read, - request, - timeout=self._config.rpc.timeout, - metadata=self._config.rpc.metadata, - compression=self._config.rpc.compression, - ) - output = response.output - match output.WhichOneof("output"): - case "batch": - return sequenced_records_schema(output.batch, ignore_command_records) - case "next_seq_num": - # TODO: use correct last_timestamp when migrating to v1 API. - return schemas.Tail(output.next_seq_num, 0) - case _: - raise RuntimeError( - "Read output doesn't match any of the expected values" - ) - - @fallible - async def read_session( - self, - start: schemas.SeqNum | schemas.Timestamp | schemas.TailOffset, - limit: schemas.ReadLimit | None = None, - until: int | None = None, - clamp: bool = False, - ignore_command_records: bool = False, - ) -> AsyncIterable[list[schemas.SequencedRecord] | schemas.Tail]: - """ - Read batches of records from a stream continuously. - - Args: - start: Inclusive start position. - limit: Number of records to return, up to a maximum of 1000 or 1MiB of :func:`.metered_bytes`. - until: Exclusive timestamp to read until. It is applied as an additional constraint on - top of the ``limit`` and guarantees that all returned records have timestamps less - than this timestamp. - clamp: Clamp the ``start`` position to the stream's tail when it exceeds the tail. 
- ignore_command_records: Filters out command records if present from the batch. - - Note: - With a session, you are able to read in a streaming fashion. If ``limit`` and/or ``until`` - were not provided and the tail of the stream is reached, the session goes into - real-time tailing mode and will yield records as they are appended to the stream. - - Yields: - Batch of sequenced records. - - (or) - - Tail of the stream. It will be yielded only if ``start`` exceeds the tail and ``clamp`` - was ``False``. - - Returns: - If ``limit`` and/or ``until`` were provided, and if there are no further records that - satisfy those constraints. - - (or) - - If the previous yield was the tail of the stream. - """ - request = read_session_request_message(self.name, start, limit, until, clamp) - max_attempts = self._config.max_retries - backoffs = compute_backoffs(max_attempts) - attempt = 0 - while True: - try: - async for response in self._stub.ReadSession( - request, - metadata=self._config.rpc.metadata, - compression=self._config.rpc.compression, - ): - if attempt > 0: - attempt = 0 - output = response.output - match output.WhichOneof("output"): - case "batch": - records = sequenced_records_schema( - output.batch, ignore_command_records - ) - _prepare_read_session_request_for_retry(request, records) - yield records - case "next_seq_num": - # TODO: use correct last_timestamp when migrating to v1 API. 
- yield schemas.Tail(output.next_seq_num, 0) - return - case _: - raise RuntimeError( - "Read output doesn't match any of the expected values" - ) - return - except Exception as e: - if attempt < max_attempts and _grpc_retry_on(e): - await asyncio.sleep(backoffs[attempt]) - attempt += 1 - else: - raise e diff --git a/src/streamstore/_lib/s2/v1alpha/s2_pb2.py b/src/streamstore/_lib/s2/v1alpha/s2_pb2.py deleted file mode 100644 index 5f3d685..0000000 --- a/src/streamstore/_lib/s2/v1alpha/s2_pb2.py +++ /dev/null @@ -1,228 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: s2/v1alpha/s2.proto -# Protobuf Python Version: 5.29.0 -"""Generated protocol buffer code.""" - -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder - -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, 5, 29, 0, "", "s2/v1alpha/s2.proto" -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x13s2/v1alpha/s2.proto\x12\ns2.v1alpha\x1a google/protobuf/field_mask.proto"V\n\x11ListBasinsRequest\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x13\n\x0bstart_after\x18\x02 \x01(\t\x12\x12\n\x05limit\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x08\n\x06_limit"M\n\x12ListBasinsResponse\x12%\n\x06\x62\x61sins\x18\x01 \x03(\x0b\x32\x15.s2.v1alpha.BasinInfo\x12\x10\n\x08has_more\x18\x02 \x01(\x08"s\n\x12\x43reateBasinRequest\x12\r\n\x05\x62\x61sin\x18\x01 \x01(\t\x12\'\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x17.s2.v1alpha.BasinConfig\x12%\n\x05scope\x18\x03 \x01(\x0e\x32\x16.s2.v1alpha.BasinScope":\n\x13\x43reateBasinResponse\x12#\n\x04info\x18\x01 
\x01(\x0b\x32\x15.s2.v1alpha.BasinInfo"#\n\x12\x44\x65leteBasinRequest\x12\r\n\x05\x62\x61sin\x18\x01 \x01(\t"\x15\n\x13\x44\x65leteBasinResponse"&\n\x15GetBasinConfigRequest\x12\r\n\x05\x62\x61sin\x18\x01 \x01(\t"A\n\x16GetBasinConfigResponse\x12\'\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x17.s2.v1alpha.BasinConfig"{\n\x17ReconfigureBasinRequest\x12\r\n\x05\x62\x61sin\x18\x01 \x01(\t\x12\'\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x17.s2.v1alpha.BasinConfig\x12(\n\x04mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"C\n\x18ReconfigureBasinResponse\x12\'\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x17.s2.v1alpha.BasinConfig"D\n\x17IssueAccessTokenRequest\x12)\n\x04info\x18\x01 \x01(\x0b\x32\x1b.s2.v1alpha.AccessTokenInfo"3\n\x14ReadWritePermissions\x12\x0c\n\x04read\x18\x01 \x01(\x08\x12\r\n\x05write\x18\x02 \x01(\x08"\xb0\x01\n\x18PermittedOperationGroups\x12\x31\n\x07\x61\x63\x63ount\x18\x01 \x01(\x0b\x32 .s2.v1alpha.ReadWritePermissions\x12/\n\x05\x62\x61sin\x18\x02 \x01(\x0b\x32 .s2.v1alpha.ReadWritePermissions\x12\x30\n\x06stream\x18\x03 \x01(\x0b\x32 .s2.v1alpha.ReadWritePermissions"&\n\x18RevokeAccessTokenRequest\x12\n\n\x02id\x18\x01 \x01(\t"F\n\x19RevokeAccessTokenResponse\x12)\n\x04info\x18\x01 \x01(\x0b\x32\x1b.s2.v1alpha.AccessTokenInfo"\\\n\x17ListAccessTokensRequest\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x13\n\x0bstart_after\x18\x02 \x01(\t\x12\x12\n\x05limit\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x08\n\x06_limit"`\n\x18ListAccessTokensResponse\x12\x32\n\raccess_tokens\x18\x01 \x03(\x0b\x32\x1b.s2.v1alpha.AccessTokenInfo\x12\x10\n\x08has_more\x18\x02 \x01(\x08"\x8f\x01\n\x0f\x41\x63\x63\x65ssTokenInfo\x12\n\n\x02id\x18\x01 \x01(\t\x12\x17\n\nexpires_at\x18\x02 \x01(\rH\x00\x88\x01\x01\x12\x1b\n\x13\x61uto_prefix_streams\x18\x03 \x01(\x08\x12+\n\x05scope\x18\x04 \x01(\x0b\x32\x1c.s2.v1alpha.AccessTokenScopeB\r\n\x0b_expires_at"\xf2\x01\n\x10\x41\x63\x63\x65ssTokenScope\x12\'\n\x06\x62\x61sins\x18\x01 
\x01(\x0b\x32\x17.s2.v1alpha.ResourceSet\x12(\n\x07streams\x18\x02 \x01(\x0b\x32\x17.s2.v1alpha.ResourceSet\x12.\n\raccess_tokens\x18\x03 \x01(\x0b\x32\x17.s2.v1alpha.ResourceSet\x12\x37\n\top_groups\x18\x04 \x01(\x0b\x32$.s2.v1alpha.PermittedOperationGroups\x12"\n\x03ops\x18\x05 \x03(\x0e\x32\x15.s2.v1alpha.Operation"<\n\x0bResourceSet\x12\x0f\n\x05\x65xact\x18\x01 \x01(\tH\x00\x12\x10\n\x06prefix\x18\x02 \x01(\tH\x00\x42\n\n\x08matching"0\n\x18IssueAccessTokenResponse\x12\x14\n\x0c\x61\x63\x63\x65ss_token\x18\x01 \x01(\t"V\n\nStreamInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ncreated_at\x18\x02 \x01(\r\x12\x17\n\ndeleted_at\x18\x03 \x01(\rH\x00\x88\x01\x01\x42\r\n\x0b_deleted_at"W\n\x12ListStreamsRequest\x12\x0e\n\x06prefix\x18\x01 \x01(\t\x12\x13\n\x0bstart_after\x18\x02 \x01(\t\x12\x12\n\x05limit\x18\x03 \x01(\x04H\x00\x88\x01\x01\x42\x08\n\x06_limit"P\n\x13ListStreamsResponse\x12\'\n\x07streams\x18\x01 \x03(\x0b\x32\x16.s2.v1alpha.StreamInfo\x12\x10\n\x08has_more\x18\x02 \x01(\x08"O\n\x13\x43reateStreamRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t\x12(\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x18.s2.v1alpha.StreamConfig"<\n\x14\x43reateStreamResponse\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.s2.v1alpha.StreamInfo"%\n\x13\x44\x65leteStreamRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t"\x16\n\x14\x44\x65leteStreamResponse"(\n\x16GetStreamConfigRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t"C\n\x17GetStreamConfigResponse\x12(\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x18.s2.v1alpha.StreamConfig"~\n\x18ReconfigureStreamRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t\x12(\n\x06\x63onfig\x18\x02 \x01(\x0b\x32\x18.s2.v1alpha.StreamConfig\x12(\n\x04mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"E\n\x19ReconfigureStreamResponse\x12(\n\x06\x63onfig\x18\x01 \x01(\x0b\x32\x18.s2.v1alpha.StreamConfig""\n\x10\x43heckTailRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t"A\n\x11\x43heckTailResponse\x12\x14\n\x0cnext_seq_num\x18\x01 \x01(\x04\x12\x16\n\x0elast_timestamp\x18\x02 
\x01(\x04"\xa4\x01\n\x0b\x41ppendInput\x12\x0e\n\x06stream\x18\x01 \x01(\t\x12)\n\x07records\x18\x02 \x03(\x0b\x32\x18.s2.v1alpha.AppendRecord\x12\x1a\n\rmatch_seq_num\x18\x03 \x01(\x04H\x00\x88\x01\x01\x12\x1a\n\rfencing_token\x18\x04 \x01(\tH\x01\x88\x01\x01\x42\x10\n\x0e_match_seq_numB\x10\n\x0e_fencing_token"\x98\x01\n\x0c\x41ppendOutput\x12\x15\n\rstart_seq_num\x18\x01 \x01(\x04\x12\x17\n\x0fstart_timestamp\x18\x04 \x01(\x04\x12\x13\n\x0b\x65nd_seq_num\x18\x02 \x01(\x04\x12\x15\n\rend_timestamp\x18\x05 \x01(\x04\x12\x14\n\x0cnext_seq_num\x18\x03 \x01(\x04\x12\x16\n\x0elast_timestamp\x18\x06 \x01(\x04"7\n\rAppendRequest\x12&\n\x05input\x18\x01 \x01(\x0b\x32\x17.s2.v1alpha.AppendInput":\n\x0e\x41ppendResponse\x12(\n\x06output\x18\x01 \x01(\x0b\x32\x18.s2.v1alpha.AppendOutput">\n\x14\x41ppendSessionRequest\x12&\n\x05input\x18\x01 \x01(\x0b\x32\x17.s2.v1alpha.AppendInput"A\n\x15\x41ppendSessionResponse\x12(\n\x06output\x18\x01 \x01(\x0b\x32\x18.s2.v1alpha.AppendOutput"g\n\nReadOutput\x12\x31\n\x05\x62\x61tch\x18\x01 \x01(\x0b\x32 .s2.v1alpha.SequencedRecordBatchH\x00\x12\x16\n\x0cnext_seq_num\x18\x03 \x01(\x04H\x00\x42\x08\n\x06outputJ\x04\x08\x02\x10\x03"\xb8\x01\n\x0bReadRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t\x12\x11\n\x07seq_num\x18\x02 \x01(\x04H\x00\x12\x13\n\ttimestamp\x18\x04 \x01(\x04H\x00\x12\x15\n\x0btail_offset\x18\x05 \x01(\x04H\x00\x12$\n\x05limit\x18\x03 \x01(\x0b\x32\x15.s2.v1alpha.ReadLimit\x12\x12\n\x05until\x18\x06 \x01(\x04H\x01\x88\x01\x01\x12\r\n\x05\x63lamp\x18\x07 \x01(\x08\x42\x07\n\x05startB\x08\n\x06_until"6\n\x0cReadResponse\x12&\n\x06output\x18\x01 \x01(\x0b\x32\x16.s2.v1alpha.ReadOutput"G\n\tReadLimit\x12\x12\n\x05\x63ount\x18\x01 \x01(\x04H\x00\x88\x01\x01\x12\x12\n\x05\x62ytes\x18\x02 \x01(\x04H\x01\x88\x01\x01\x42\x08\n\x06_countB\x08\n\x06_bytes"\xd3\x01\n\x12ReadSessionRequest\x12\x0e\n\x06stream\x18\x01 \x01(\t\x12\x11\n\x07seq_num\x18\x02 \x01(\x04H\x00\x12\x13\n\ttimestamp\x18\x05 
\x01(\x04H\x00\x12\x15\n\x0btail_offset\x18\x06 \x01(\x04H\x00\x12$\n\x05limit\x18\x03 \x01(\x0b\x32\x15.s2.v1alpha.ReadLimit\x12\x12\n\nheartbeats\x18\x04 \x01(\x08\x12\x12\n\x05until\x18\x07 \x01(\x04H\x01\x88\x01\x01\x12\r\n\x05\x63lamp\x18\x08 \x01(\x08\x42\x07\n\x05startB\x08\n\x06_until"M\n\x13ReadSessionResponse\x12+\n\x06output\x18\x01 \x01(\x0b\x32\x16.s2.v1alpha.ReadOutputH\x00\x88\x01\x01\x42\t\n\x07_output"\xc8\x03\n\x0cStreamConfig\x12/\n\rstorage_class\x18\x01 \x01(\x0e\x32\x18.s2.v1alpha.StorageClass\x12\r\n\x03\x61ge\x18\x02 \x01(\x04H\x00\x12>\n\x08infinite\x18\x07 \x01(\x0b\x32*.s2.v1alpha.StreamConfig.InfiniteRetentionH\x00\x12;\n\x0ctimestamping\x18\x05 \x01(\x0b\x32%.s2.v1alpha.StreamConfig.Timestamping\x12?\n\x0f\x64\x65lete_on_empty\x18\x06 \x01(\x0b\x32&.s2.v1alpha.StreamConfig.DeleteOnEmpty\x1a^\n\x0cTimestamping\x12*\n\x04mode\x18\x01 \x01(\x0e\x32\x1c.s2.v1alpha.TimestampingMode\x12\x15\n\x08uncapped\x18\x02 \x01(\x08H\x00\x88\x01\x01\x42\x0b\n\t_uncapped\x1a%\n\rDeleteOnEmpty\x12\x14\n\x0cmin_age_secs\x18\x01 \x01(\x04\x1a\x13\n\x11InfiniteRetentionB\x12\n\x10retention_policyJ\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05"\x86\x01\n\x0b\x42\x61sinConfig\x12\x37\n\x15\x64\x65\x66\x61ult_stream_config\x18\x01 \x01(\x0b\x32\x18.s2.v1alpha.StreamConfig\x12\x1f\n\x17\x63reate_stream_on_append\x18\x02 \x01(\x08\x12\x1d\n\x15\x63reate_stream_on_read\x18\x03 \x01(\x08"g\n\tBasinInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12%\n\x05scope\x18\x05 \x01(\x0e\x32\x16.s2.v1alpha.BasinScope\x12%\n\x05state\x18\x04 \x01(\x0e\x32\x16.s2.v1alpha.BasinState"%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c"g\n\x0c\x41ppendRecord\x12\x16\n\ttimestamp\x18\x03 \x01(\x04H\x00\x88\x01\x01\x12#\n\x07headers\x18\x01 \x03(\x0b\x32\x12.s2.v1alpha.Header\x12\x0c\n\x04\x62ody\x18\x02 \x01(\x0c\x42\x0c\n\n_timestamp"h\n\x0fSequencedRecord\x12\x0f\n\x07seq_num\x18\x01 \x01(\x04\x12\x11\n\ttimestamp\x18\x04 \x01(\x04\x12#\n\x07headers\x18\x02 
\x03(\x0b\x32\x12.s2.v1alpha.Header\x12\x0c\n\x04\x62ody\x18\x03 \x01(\x0c"D\n\x14SequencedRecordBatch\x12,\n\x07records\x18\x01 \x03(\x0b\x32\x1b.s2.v1alpha.SequencedRecord*H\n\nBasinScope\x12\x1b\n\x17\x42\x41SIN_SCOPE_UNSPECIFIED\x10\x00\x12\x1d\n\x19\x42\x41SIN_SCOPE_AWS_US_EAST_1\x10\x01*\x81\x05\n\tOperation\x12\x19\n\x15OPERATION_UNSPECIFIED\x10\x00\x12\x19\n\x15OPERATION_LIST_BASINS\x10\x01\x12\x1a\n\x16OPERATION_CREATE_BASIN\x10\x02\x12\x1a\n\x16OPERATION_DELETE_BASIN\x10\x03\x12\x1f\n\x1bOPERATION_RECONFIGURE_BASIN\x10\x04\x12\x1e\n\x1aOPERATION_GET_BASIN_CONFIG\x10\x05\x12 \n\x1cOPERATION_ISSUE_ACCESS_TOKEN\x10\x06\x12!\n\x1dOPERATION_REVOKE_ACCESS_TOKEN\x10\x07\x12 \n\x1cOPERATION_LIST_ACCESS_TOKENS\x10\x08\x12\x1a\n\x16OPERATION_LIST_STREAMS\x10\t\x12\x1b\n\x17OPERATION_CREATE_STREAM\x10\n\x12\x1b\n\x17OPERATION_DELETE_STREAM\x10\x0b\x12\x1f\n\x1bOPERATION_GET_STREAM_CONFIG\x10\x0c\x12 \n\x1cOPERATION_RECONFIGURE_STREAM\x10\r\x12\x18\n\x14OPERATION_CHECK_TAIL\x10\x0e\x12\x14\n\x10OPERATION_APPEND\x10\x0f\x12\x12\n\x0eOPERATION_READ\x10\x10\x12\x12\n\x0eOPERATION_TRIM\x10\x11\x12\x13\n\x0fOPERATION_FENCE\x10\x12\x12\x1d\n\x19OPERATION_ACCOUNT_METRICS\x10\x13\x12\x1b\n\x17OPERATION_BASIN_METRICS\x10\x14\x12\x1c\n\x18OPERATION_STREAM_METRICS\x10\x15*d\n\x0cStorageClass\x12\x1d\n\x19STORAGE_CLASS_UNSPECIFIED\x10\x00\x12\x1a\n\x16STORAGE_CLASS_STANDARD\x10\x01\x12\x19\n\x15STORAGE_CLASS_EXPRESS\x10\x02*\x9f\x01\n\x10TimestampingMode\x12!\n\x1dTIMESTAMPING_MODE_UNSPECIFIED\x10\x00\x12#\n\x1fTIMESTAMPING_MODE_CLIENT_PREFER\x10\x01\x12$\n 
TIMESTAMPING_MODE_CLIENT_REQUIRE\x10\x02\x12\x1d\n\x19TIMESTAMPING_MODE_ARRIVAL\x10\x03*u\n\nBasinState\x12\x1b\n\x17\x42\x41SIN_STATE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x42\x41SIN_STATE_ACTIVE\x10\x01\x12\x18\n\x14\x42\x41SIN_STATE_CREATING\x10\x02\x12\x18\n\x14\x42\x41SIN_STATE_DELETING\x10\x03\x32\xf8\x05\n\x0e\x41\x63\x63ountService\x12P\n\nListBasins\x12\x1d.s2.v1alpha.ListBasinsRequest\x1a\x1e.s2.v1alpha.ListBasinsResponse"\x03\x90\x02\x01\x12S\n\x0b\x43reateBasin\x12\x1e.s2.v1alpha.CreateBasinRequest\x1a\x1f.s2.v1alpha.CreateBasinResponse"\x03\x90\x02\x02\x12S\n\x0b\x44\x65leteBasin\x12\x1e.s2.v1alpha.DeleteBasinRequest\x1a\x1f.s2.v1alpha.DeleteBasinResponse"\x03\x90\x02\x02\x12\x62\n\x10ReconfigureBasin\x12#.s2.v1alpha.ReconfigureBasinRequest\x1a$.s2.v1alpha.ReconfigureBasinResponse"\x03\x90\x02\x02\x12\\\n\x0eGetBasinConfig\x12!.s2.v1alpha.GetBasinConfigRequest\x1a".s2.v1alpha.GetBasinConfigResponse"\x03\x90\x02\x01\x12]\n\x10IssueAccessToken\x12#.s2.v1alpha.IssueAccessTokenRequest\x1a$.s2.v1alpha.IssueAccessTokenResponse\x12\x65\n\x11RevokeAccessToken\x12$.s2.v1alpha.RevokeAccessTokenRequest\x1a%.s2.v1alpha.RevokeAccessTokenResponse"\x03\x90\x02\x02\x12\x62\n\x10ListAccessTokens\x12#.s2.v1alpha.ListAccessTokensRequest\x1a$.s2.v1alpha.ListAccessTokensResponse"\x03\x90\x02\x01\x32\xdb\x03\n\x0c\x42\x61sinService\x12S\n\x0bListStreams\x12\x1e.s2.v1alpha.ListStreamsRequest\x1a\x1f.s2.v1alpha.ListStreamsResponse"\x03\x90\x02\x01\x12V\n\x0c\x43reateStream\x12\x1f.s2.v1alpha.CreateStreamRequest\x1a .s2.v1alpha.CreateStreamResponse"\x03\x90\x02\x02\x12V\n\x0c\x44\x65leteStream\x12\x1f.s2.v1alpha.DeleteStreamRequest\x1a 
.s2.v1alpha.DeleteStreamResponse"\x03\x90\x02\x02\x12_\n\x0fGetStreamConfig\x12".s2.v1alpha.GetStreamConfigRequest\x1a#.s2.v1alpha.GetStreamConfigResponse"\x03\x90\x02\x01\x12\x65\n\x11ReconfigureStream\x12$.s2.v1alpha.ReconfigureStreamRequest\x1a%.s2.v1alpha.ReconfigureStreamResponse"\x03\x90\x02\x02\x32\x90\x03\n\rStreamService\x12M\n\tCheckTail\x12\x1c.s2.v1alpha.CheckTailRequest\x1a\x1d.s2.v1alpha.CheckTailResponse"\x03\x90\x02\x01\x12?\n\x06\x41ppend\x12\x19.s2.v1alpha.AppendRequest\x1a\x1a.s2.v1alpha.AppendResponse\x12X\n\rAppendSession\x12 .s2.v1alpha.AppendSessionRequest\x1a!.s2.v1alpha.AppendSessionResponse(\x01\x30\x01\x12>\n\x04Read\x12\x17.s2.v1alpha.ReadRequest\x1a\x18.s2.v1alpha.ReadResponse"\x03\x90\x02\x01\x12U\n\x0bReadSession\x12\x1e.s2.v1alpha.ReadSessionRequest\x1a\x1f.s2.v1alpha.ReadSessionResponse"\x03\x90\x02\x01\x30\x01\x42\x0e\n\ns2.v1alphaP\x01\x62\x06proto3' -) - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "s2.v1alpha.s2_pb2", _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals["DESCRIPTOR"]._loaded_options = None - _globals["DESCRIPTOR"]._serialized_options = b"\n\ns2.v1alphaP\001" - _globals["_ACCOUNTSERVICE"].methods_by_name["ListBasins"]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "ListBasins" - ]._serialized_options = b"\220\002\001" - _globals["_ACCOUNTSERVICE"].methods_by_name["CreateBasin"]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "CreateBasin" - ]._serialized_options = b"\220\002\002" - _globals["_ACCOUNTSERVICE"].methods_by_name["DeleteBasin"]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "DeleteBasin" - ]._serialized_options = b"\220\002\002" - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "ReconfigureBasin" - ]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "ReconfigureBasin" - ]._serialized_options = b"\220\002\002" 
- _globals["_ACCOUNTSERVICE"].methods_by_name["GetBasinConfig"]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "GetBasinConfig" - ]._serialized_options = b"\220\002\001" - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "RevokeAccessToken" - ]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "RevokeAccessToken" - ]._serialized_options = b"\220\002\002" - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "ListAccessTokens" - ]._loaded_options = None - _globals["_ACCOUNTSERVICE"].methods_by_name[ - "ListAccessTokens" - ]._serialized_options = b"\220\002\001" - _globals["_BASINSERVICE"].methods_by_name["ListStreams"]._loaded_options = None - _globals["_BASINSERVICE"].methods_by_name[ - "ListStreams" - ]._serialized_options = b"\220\002\001" - _globals["_BASINSERVICE"].methods_by_name["CreateStream"]._loaded_options = None - _globals["_BASINSERVICE"].methods_by_name[ - "CreateStream" - ]._serialized_options = b"\220\002\002" - _globals["_BASINSERVICE"].methods_by_name["DeleteStream"]._loaded_options = None - _globals["_BASINSERVICE"].methods_by_name[ - "DeleteStream" - ]._serialized_options = b"\220\002\002" - _globals["_BASINSERVICE"].methods_by_name["GetStreamConfig"]._loaded_options = None - _globals["_BASINSERVICE"].methods_by_name[ - "GetStreamConfig" - ]._serialized_options = b"\220\002\001" - _globals["_BASINSERVICE"].methods_by_name[ - "ReconfigureStream" - ]._loaded_options = None - _globals["_BASINSERVICE"].methods_by_name[ - "ReconfigureStream" - ]._serialized_options = b"\220\002\002" - _globals["_STREAMSERVICE"].methods_by_name["CheckTail"]._loaded_options = None - _globals["_STREAMSERVICE"].methods_by_name[ - "CheckTail" - ]._serialized_options = b"\220\002\001" - _globals["_STREAMSERVICE"].methods_by_name["Read"]._loaded_options = None - _globals["_STREAMSERVICE"].methods_by_name[ - "Read" - ]._serialized_options = b"\220\002\001" - _globals["_STREAMSERVICE"].methods_by_name["ReadSession"]._loaded_options = 
None - _globals["_STREAMSERVICE"].methods_by_name[ - "ReadSession" - ]._serialized_options = b"\220\002\001" - _globals["_BASINSCOPE"]._serialized_start = 5066 - _globals["_BASINSCOPE"]._serialized_end = 5138 - _globals["_OPERATION"]._serialized_start = 5141 - _globals["_OPERATION"]._serialized_end = 5782 - _globals["_STORAGECLASS"]._serialized_start = 5784 - _globals["_STORAGECLASS"]._serialized_end = 5884 - _globals["_TIMESTAMPINGMODE"]._serialized_start = 5887 - _globals["_TIMESTAMPINGMODE"]._serialized_end = 6046 - _globals["_BASINSTATE"]._serialized_start = 6048 - _globals["_BASINSTATE"]._serialized_end = 6165 - _globals["_LISTBASINSREQUEST"]._serialized_start = 69 - _globals["_LISTBASINSREQUEST"]._serialized_end = 155 - _globals["_LISTBASINSRESPONSE"]._serialized_start = 157 - _globals["_LISTBASINSRESPONSE"]._serialized_end = 234 - _globals["_CREATEBASINREQUEST"]._serialized_start = 236 - _globals["_CREATEBASINREQUEST"]._serialized_end = 351 - _globals["_CREATEBASINRESPONSE"]._serialized_start = 353 - _globals["_CREATEBASINRESPONSE"]._serialized_end = 411 - _globals["_DELETEBASINREQUEST"]._serialized_start = 413 - _globals["_DELETEBASINREQUEST"]._serialized_end = 448 - _globals["_DELETEBASINRESPONSE"]._serialized_start = 450 - _globals["_DELETEBASINRESPONSE"]._serialized_end = 471 - _globals["_GETBASINCONFIGREQUEST"]._serialized_start = 473 - _globals["_GETBASINCONFIGREQUEST"]._serialized_end = 511 - _globals["_GETBASINCONFIGRESPONSE"]._serialized_start = 513 - _globals["_GETBASINCONFIGRESPONSE"]._serialized_end = 578 - _globals["_RECONFIGUREBASINREQUEST"]._serialized_start = 580 - _globals["_RECONFIGUREBASINREQUEST"]._serialized_end = 703 - _globals["_RECONFIGUREBASINRESPONSE"]._serialized_start = 705 - _globals["_RECONFIGUREBASINRESPONSE"]._serialized_end = 772 - _globals["_ISSUEACCESSTOKENREQUEST"]._serialized_start = 774 - _globals["_ISSUEACCESSTOKENREQUEST"]._serialized_end = 842 - _globals["_READWRITEPERMISSIONS"]._serialized_start = 844 - 
_globals["_READWRITEPERMISSIONS"]._serialized_end = 895 - _globals["_PERMITTEDOPERATIONGROUPS"]._serialized_start = 898 - _globals["_PERMITTEDOPERATIONGROUPS"]._serialized_end = 1074 - _globals["_REVOKEACCESSTOKENREQUEST"]._serialized_start = 1076 - _globals["_REVOKEACCESSTOKENREQUEST"]._serialized_end = 1114 - _globals["_REVOKEACCESSTOKENRESPONSE"]._serialized_start = 1116 - _globals["_REVOKEACCESSTOKENRESPONSE"]._serialized_end = 1186 - _globals["_LISTACCESSTOKENSREQUEST"]._serialized_start = 1188 - _globals["_LISTACCESSTOKENSREQUEST"]._serialized_end = 1280 - _globals["_LISTACCESSTOKENSRESPONSE"]._serialized_start = 1282 - _globals["_LISTACCESSTOKENSRESPONSE"]._serialized_end = 1378 - _globals["_ACCESSTOKENINFO"]._serialized_start = 1381 - _globals["_ACCESSTOKENINFO"]._serialized_end = 1524 - _globals["_ACCESSTOKENSCOPE"]._serialized_start = 1527 - _globals["_ACCESSTOKENSCOPE"]._serialized_end = 1769 - _globals["_RESOURCESET"]._serialized_start = 1771 - _globals["_RESOURCESET"]._serialized_end = 1831 - _globals["_ISSUEACCESSTOKENRESPONSE"]._serialized_start = 1833 - _globals["_ISSUEACCESSTOKENRESPONSE"]._serialized_end = 1881 - _globals["_STREAMINFO"]._serialized_start = 1883 - _globals["_STREAMINFO"]._serialized_end = 1969 - _globals["_LISTSTREAMSREQUEST"]._serialized_start = 1971 - _globals["_LISTSTREAMSREQUEST"]._serialized_end = 2058 - _globals["_LISTSTREAMSRESPONSE"]._serialized_start = 2060 - _globals["_LISTSTREAMSRESPONSE"]._serialized_end = 2140 - _globals["_CREATESTREAMREQUEST"]._serialized_start = 2142 - _globals["_CREATESTREAMREQUEST"]._serialized_end = 2221 - _globals["_CREATESTREAMRESPONSE"]._serialized_start = 2223 - _globals["_CREATESTREAMRESPONSE"]._serialized_end = 2283 - _globals["_DELETESTREAMREQUEST"]._serialized_start = 2285 - _globals["_DELETESTREAMREQUEST"]._serialized_end = 2322 - _globals["_DELETESTREAMRESPONSE"]._serialized_start = 2324 - _globals["_DELETESTREAMRESPONSE"]._serialized_end = 2346 - 
_globals["_GETSTREAMCONFIGREQUEST"]._serialized_start = 2348 - _globals["_GETSTREAMCONFIGREQUEST"]._serialized_end = 2388 - _globals["_GETSTREAMCONFIGRESPONSE"]._serialized_start = 2390 - _globals["_GETSTREAMCONFIGRESPONSE"]._serialized_end = 2457 - _globals["_RECONFIGURESTREAMREQUEST"]._serialized_start = 2459 - _globals["_RECONFIGURESTREAMREQUEST"]._serialized_end = 2585 - _globals["_RECONFIGURESTREAMRESPONSE"]._serialized_start = 2587 - _globals["_RECONFIGURESTREAMRESPONSE"]._serialized_end = 2656 - _globals["_CHECKTAILREQUEST"]._serialized_start = 2658 - _globals["_CHECKTAILREQUEST"]._serialized_end = 2692 - _globals["_CHECKTAILRESPONSE"]._serialized_start = 2694 - _globals["_CHECKTAILRESPONSE"]._serialized_end = 2759 - _globals["_APPENDINPUT"]._serialized_start = 2762 - _globals["_APPENDINPUT"]._serialized_end = 2926 - _globals["_APPENDOUTPUT"]._serialized_start = 2929 - _globals["_APPENDOUTPUT"]._serialized_end = 3081 - _globals["_APPENDREQUEST"]._serialized_start = 3083 - _globals["_APPENDREQUEST"]._serialized_end = 3138 - _globals["_APPENDRESPONSE"]._serialized_start = 3140 - _globals["_APPENDRESPONSE"]._serialized_end = 3198 - _globals["_APPENDSESSIONREQUEST"]._serialized_start = 3200 - _globals["_APPENDSESSIONREQUEST"]._serialized_end = 3262 - _globals["_APPENDSESSIONRESPONSE"]._serialized_start = 3264 - _globals["_APPENDSESSIONRESPONSE"]._serialized_end = 3329 - _globals["_READOUTPUT"]._serialized_start = 3331 - _globals["_READOUTPUT"]._serialized_end = 3434 - _globals["_READREQUEST"]._serialized_start = 3437 - _globals["_READREQUEST"]._serialized_end = 3621 - _globals["_READRESPONSE"]._serialized_start = 3623 - _globals["_READRESPONSE"]._serialized_end = 3677 - _globals["_READLIMIT"]._serialized_start = 3679 - _globals["_READLIMIT"]._serialized_end = 3750 - _globals["_READSESSIONREQUEST"]._serialized_start = 3753 - _globals["_READSESSIONREQUEST"]._serialized_end = 3964 - _globals["_READSESSIONRESPONSE"]._serialized_start = 3966 - 
_globals["_READSESSIONRESPONSE"]._serialized_end = 4043 - _globals["_STREAMCONFIG"]._serialized_start = 4046 - _globals["_STREAMCONFIG"]._serialized_end = 4502 - _globals["_STREAMCONFIG_TIMESTAMPING"]._serialized_start = 4316 - _globals["_STREAMCONFIG_TIMESTAMPING"]._serialized_end = 4410 - _globals["_STREAMCONFIG_DELETEONEMPTY"]._serialized_start = 4412 - _globals["_STREAMCONFIG_DELETEONEMPTY"]._serialized_end = 4449 - _globals["_STREAMCONFIG_INFINITERETENTION"]._serialized_start = 4451 - _globals["_STREAMCONFIG_INFINITERETENTION"]._serialized_end = 4470 - _globals["_BASINCONFIG"]._serialized_start = 4505 - _globals["_BASINCONFIG"]._serialized_end = 4639 - _globals["_BASININFO"]._serialized_start = 4641 - _globals["_BASININFO"]._serialized_end = 4744 - _globals["_HEADER"]._serialized_start = 4746 - _globals["_HEADER"]._serialized_end = 4783 - _globals["_APPENDRECORD"]._serialized_start = 4785 - _globals["_APPENDRECORD"]._serialized_end = 4888 - _globals["_SEQUENCEDRECORD"]._serialized_start = 4890 - _globals["_SEQUENCEDRECORD"]._serialized_end = 4994 - _globals["_SEQUENCEDRECORDBATCH"]._serialized_start = 4996 - _globals["_SEQUENCEDRECORDBATCH"]._serialized_end = 5064 - _globals["_ACCOUNTSERVICE"]._serialized_start = 6168 - _globals["_ACCOUNTSERVICE"]._serialized_end = 6928 - _globals["_BASINSERVICE"]._serialized_start = 6931 - _globals["_BASINSERVICE"]._serialized_end = 7406 - _globals["_STREAMSERVICE"]._serialized_start = 7409 - _globals["_STREAMSERVICE"]._serialized_end = 7809 -# @@protoc_insertion_point(module_scope) diff --git a/src/streamstore/_lib/s2/v1alpha/s2_pb2.pyi b/src/streamstore/_lib/s2/v1alpha/s2_pb2.pyi deleted file mode 100644 index c77b8fa..0000000 --- a/src/streamstore/_lib/s2/v1alpha/s2_pb2.pyi +++ /dev/null @@ -1,765 +0,0 @@ -from typing import ClassVar as _ClassVar -from typing import Iterable as _Iterable -from typing import Mapping as _Mapping -from typing import Optional as _Optional -from typing import Union as _Union - -from 
google.protobuf import descriptor as _descriptor -from google.protobuf import field_mask_pb2 as _field_mask_pb2 -from google.protobuf import message as _message -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper - -DESCRIPTOR: _descriptor.FileDescriptor - -class BasinScope(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - BASIN_SCOPE_UNSPECIFIED: _ClassVar[BasinScope] - BASIN_SCOPE_AWS_US_EAST_1: _ClassVar[BasinScope] - -class Operation(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - OPERATION_UNSPECIFIED: _ClassVar[Operation] - OPERATION_LIST_BASINS: _ClassVar[Operation] - OPERATION_CREATE_BASIN: _ClassVar[Operation] - OPERATION_DELETE_BASIN: _ClassVar[Operation] - OPERATION_RECONFIGURE_BASIN: _ClassVar[Operation] - OPERATION_GET_BASIN_CONFIG: _ClassVar[Operation] - OPERATION_ISSUE_ACCESS_TOKEN: _ClassVar[Operation] - OPERATION_REVOKE_ACCESS_TOKEN: _ClassVar[Operation] - OPERATION_LIST_ACCESS_TOKENS: _ClassVar[Operation] - OPERATION_LIST_STREAMS: _ClassVar[Operation] - OPERATION_CREATE_STREAM: _ClassVar[Operation] - OPERATION_DELETE_STREAM: _ClassVar[Operation] - OPERATION_GET_STREAM_CONFIG: _ClassVar[Operation] - OPERATION_RECONFIGURE_STREAM: _ClassVar[Operation] - OPERATION_CHECK_TAIL: _ClassVar[Operation] - OPERATION_APPEND: _ClassVar[Operation] - OPERATION_READ: _ClassVar[Operation] - OPERATION_TRIM: _ClassVar[Operation] - OPERATION_FENCE: _ClassVar[Operation] - OPERATION_ACCOUNT_METRICS: _ClassVar[Operation] - OPERATION_BASIN_METRICS: _ClassVar[Operation] - OPERATION_STREAM_METRICS: _ClassVar[Operation] - -class StorageClass(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - STORAGE_CLASS_UNSPECIFIED: _ClassVar[StorageClass] - STORAGE_CLASS_STANDARD: _ClassVar[StorageClass] - STORAGE_CLASS_EXPRESS: _ClassVar[StorageClass] - -class TimestampingMode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - 
__slots__ = () - TIMESTAMPING_MODE_UNSPECIFIED: _ClassVar[TimestampingMode] - TIMESTAMPING_MODE_CLIENT_PREFER: _ClassVar[TimestampingMode] - TIMESTAMPING_MODE_CLIENT_REQUIRE: _ClassVar[TimestampingMode] - TIMESTAMPING_MODE_ARRIVAL: _ClassVar[TimestampingMode] - -class BasinState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): - __slots__ = () - BASIN_STATE_UNSPECIFIED: _ClassVar[BasinState] - BASIN_STATE_ACTIVE: _ClassVar[BasinState] - BASIN_STATE_CREATING: _ClassVar[BasinState] - BASIN_STATE_DELETING: _ClassVar[BasinState] - -BASIN_SCOPE_UNSPECIFIED: BasinScope -BASIN_SCOPE_AWS_US_EAST_1: BasinScope -OPERATION_UNSPECIFIED: Operation -OPERATION_LIST_BASINS: Operation -OPERATION_CREATE_BASIN: Operation -OPERATION_DELETE_BASIN: Operation -OPERATION_RECONFIGURE_BASIN: Operation -OPERATION_GET_BASIN_CONFIG: Operation -OPERATION_ISSUE_ACCESS_TOKEN: Operation -OPERATION_REVOKE_ACCESS_TOKEN: Operation -OPERATION_LIST_ACCESS_TOKENS: Operation -OPERATION_LIST_STREAMS: Operation -OPERATION_CREATE_STREAM: Operation -OPERATION_DELETE_STREAM: Operation -OPERATION_GET_STREAM_CONFIG: Operation -OPERATION_RECONFIGURE_STREAM: Operation -OPERATION_CHECK_TAIL: Operation -OPERATION_APPEND: Operation -OPERATION_READ: Operation -OPERATION_TRIM: Operation -OPERATION_FENCE: Operation -OPERATION_ACCOUNT_METRICS: Operation -OPERATION_BASIN_METRICS: Operation -OPERATION_STREAM_METRICS: Operation -STORAGE_CLASS_UNSPECIFIED: StorageClass -STORAGE_CLASS_STANDARD: StorageClass -STORAGE_CLASS_EXPRESS: StorageClass -TIMESTAMPING_MODE_UNSPECIFIED: TimestampingMode -TIMESTAMPING_MODE_CLIENT_PREFER: TimestampingMode -TIMESTAMPING_MODE_CLIENT_REQUIRE: TimestampingMode -TIMESTAMPING_MODE_ARRIVAL: TimestampingMode -BASIN_STATE_UNSPECIFIED: BasinState -BASIN_STATE_ACTIVE: BasinState -BASIN_STATE_CREATING: BasinState -BASIN_STATE_DELETING: BasinState - -class ListBasinsRequest(_message.Message): - __slots__ = ("prefix", "start_after", "limit") - PREFIX_FIELD_NUMBER: _ClassVar[int] - 
START_AFTER_FIELD_NUMBER: _ClassVar[int] - LIMIT_FIELD_NUMBER: _ClassVar[int] - prefix: str - start_after: str - limit: int - def __init__( - self, - prefix: _Optional[str] = ..., - start_after: _Optional[str] = ..., - limit: _Optional[int] = ..., - ) -> None: ... - -class ListBasinsResponse(_message.Message): - __slots__ = ("basins", "has_more") - BASINS_FIELD_NUMBER: _ClassVar[int] - HAS_MORE_FIELD_NUMBER: _ClassVar[int] - basins: _containers.RepeatedCompositeFieldContainer[BasinInfo] - has_more: bool - def __init__( - self, - basins: _Optional[_Iterable[_Union[BasinInfo, _Mapping]]] = ..., - has_more: bool = ..., - ) -> None: ... - -class CreateBasinRequest(_message.Message): - __slots__ = ("basin", "config", "scope") - BASIN_FIELD_NUMBER: _ClassVar[int] - CONFIG_FIELD_NUMBER: _ClassVar[int] - SCOPE_FIELD_NUMBER: _ClassVar[int] - basin: str - config: BasinConfig - scope: BasinScope - def __init__( - self, - basin: _Optional[str] = ..., - config: _Optional[_Union[BasinConfig, _Mapping]] = ..., - scope: _Optional[_Union[BasinScope, str]] = ..., - ) -> None: ... - -class CreateBasinResponse(_message.Message): - __slots__ = ("info",) - INFO_FIELD_NUMBER: _ClassVar[int] - info: BasinInfo - def __init__(self, info: _Optional[_Union[BasinInfo, _Mapping]] = ...) -> None: ... - -class DeleteBasinRequest(_message.Message): - __slots__ = ("basin",) - BASIN_FIELD_NUMBER: _ClassVar[int] - basin: str - def __init__(self, basin: _Optional[str] = ...) -> None: ... - -class DeleteBasinResponse(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class GetBasinConfigRequest(_message.Message): - __slots__ = ("basin",) - BASIN_FIELD_NUMBER: _ClassVar[int] - basin: str - def __init__(self, basin: _Optional[str] = ...) -> None: ... - -class GetBasinConfigResponse(_message.Message): - __slots__ = ("config",) - CONFIG_FIELD_NUMBER: _ClassVar[int] - config: BasinConfig - def __init__( - self, config: _Optional[_Union[BasinConfig, _Mapping]] = ... - ) -> None: ... 
- -class ReconfigureBasinRequest(_message.Message): - __slots__ = ("basin", "config", "mask") - BASIN_FIELD_NUMBER: _ClassVar[int] - CONFIG_FIELD_NUMBER: _ClassVar[int] - MASK_FIELD_NUMBER: _ClassVar[int] - basin: str - config: BasinConfig - mask: _field_mask_pb2.FieldMask - def __init__( - self, - basin: _Optional[str] = ..., - config: _Optional[_Union[BasinConfig, _Mapping]] = ..., - mask: _Optional[_Union[_field_mask_pb2.FieldMask, _Mapping]] = ..., - ) -> None: ... - -class ReconfigureBasinResponse(_message.Message): - __slots__ = ("config",) - CONFIG_FIELD_NUMBER: _ClassVar[int] - config: BasinConfig - def __init__( - self, config: _Optional[_Union[BasinConfig, _Mapping]] = ... - ) -> None: ... - -class IssueAccessTokenRequest(_message.Message): - __slots__ = ("info",) - INFO_FIELD_NUMBER: _ClassVar[int] - info: AccessTokenInfo - def __init__( - self, info: _Optional[_Union[AccessTokenInfo, _Mapping]] = ... - ) -> None: ... - -class ReadWritePermissions(_message.Message): - __slots__ = ("read", "write") - READ_FIELD_NUMBER: _ClassVar[int] - WRITE_FIELD_NUMBER: _ClassVar[int] - read: bool - write: bool - def __init__(self, read: bool = ..., write: bool = ...) -> None: ... - -class PermittedOperationGroups(_message.Message): - __slots__ = ("account", "basin", "stream") - ACCOUNT_FIELD_NUMBER: _ClassVar[int] - BASIN_FIELD_NUMBER: _ClassVar[int] - STREAM_FIELD_NUMBER: _ClassVar[int] - account: ReadWritePermissions - basin: ReadWritePermissions - stream: ReadWritePermissions - def __init__( - self, - account: _Optional[_Union[ReadWritePermissions, _Mapping]] = ..., - basin: _Optional[_Union[ReadWritePermissions, _Mapping]] = ..., - stream: _Optional[_Union[ReadWritePermissions, _Mapping]] = ..., - ) -> None: ... - -class RevokeAccessTokenRequest(_message.Message): - __slots__ = ("id",) - ID_FIELD_NUMBER: _ClassVar[int] - id: str - def __init__(self, id: _Optional[str] = ...) -> None: ... 
- -class RevokeAccessTokenResponse(_message.Message): - __slots__ = ("info",) - INFO_FIELD_NUMBER: _ClassVar[int] - info: AccessTokenInfo - def __init__( - self, info: _Optional[_Union[AccessTokenInfo, _Mapping]] = ... - ) -> None: ... - -class ListAccessTokensRequest(_message.Message): - __slots__ = ("prefix", "start_after", "limit") - PREFIX_FIELD_NUMBER: _ClassVar[int] - START_AFTER_FIELD_NUMBER: _ClassVar[int] - LIMIT_FIELD_NUMBER: _ClassVar[int] - prefix: str - start_after: str - limit: int - def __init__( - self, - prefix: _Optional[str] = ..., - start_after: _Optional[str] = ..., - limit: _Optional[int] = ..., - ) -> None: ... - -class ListAccessTokensResponse(_message.Message): - __slots__ = ("access_tokens", "has_more") - ACCESS_TOKENS_FIELD_NUMBER: _ClassVar[int] - HAS_MORE_FIELD_NUMBER: _ClassVar[int] - access_tokens: _containers.RepeatedCompositeFieldContainer[AccessTokenInfo] - has_more: bool - def __init__( - self, - access_tokens: _Optional[_Iterable[_Union[AccessTokenInfo, _Mapping]]] = ..., - has_more: bool = ..., - ) -> None: ... - -class AccessTokenInfo(_message.Message): - __slots__ = ("id", "expires_at", "auto_prefix_streams", "scope") - ID_FIELD_NUMBER: _ClassVar[int] - EXPIRES_AT_FIELD_NUMBER: _ClassVar[int] - AUTO_PREFIX_STREAMS_FIELD_NUMBER: _ClassVar[int] - SCOPE_FIELD_NUMBER: _ClassVar[int] - id: str - expires_at: int - auto_prefix_streams: bool - scope: AccessTokenScope - def __init__( - self, - id: _Optional[str] = ..., - expires_at: _Optional[int] = ..., - auto_prefix_streams: bool = ..., - scope: _Optional[_Union[AccessTokenScope, _Mapping]] = ..., - ) -> None: ... 
- -class AccessTokenScope(_message.Message): - __slots__ = ("basins", "streams", "access_tokens", "op_groups", "ops") - BASINS_FIELD_NUMBER: _ClassVar[int] - STREAMS_FIELD_NUMBER: _ClassVar[int] - ACCESS_TOKENS_FIELD_NUMBER: _ClassVar[int] - OP_GROUPS_FIELD_NUMBER: _ClassVar[int] - OPS_FIELD_NUMBER: _ClassVar[int] - basins: ResourceSet - streams: ResourceSet - access_tokens: ResourceSet - op_groups: PermittedOperationGroups - ops: _containers.RepeatedScalarFieldContainer[Operation] - def __init__( - self, - basins: _Optional[_Union[ResourceSet, _Mapping]] = ..., - streams: _Optional[_Union[ResourceSet, _Mapping]] = ..., - access_tokens: _Optional[_Union[ResourceSet, _Mapping]] = ..., - op_groups: _Optional[_Union[PermittedOperationGroups, _Mapping]] = ..., - ops: _Optional[_Iterable[_Union[Operation, str]]] = ..., - ) -> None: ... - -class ResourceSet(_message.Message): - __slots__ = ("exact", "prefix") - EXACT_FIELD_NUMBER: _ClassVar[int] - PREFIX_FIELD_NUMBER: _ClassVar[int] - exact: str - prefix: str - def __init__( - self, exact: _Optional[str] = ..., prefix: _Optional[str] = ... - ) -> None: ... - -class IssueAccessTokenResponse(_message.Message): - __slots__ = ("access_token",) - ACCESS_TOKEN_FIELD_NUMBER: _ClassVar[int] - access_token: str - def __init__(self, access_token: _Optional[str] = ...) -> None: ... - -class StreamInfo(_message.Message): - __slots__ = ("name", "created_at", "deleted_at") - NAME_FIELD_NUMBER: _ClassVar[int] - CREATED_AT_FIELD_NUMBER: _ClassVar[int] - DELETED_AT_FIELD_NUMBER: _ClassVar[int] - name: str - created_at: int - deleted_at: int - def __init__( - self, - name: _Optional[str] = ..., - created_at: _Optional[int] = ..., - deleted_at: _Optional[int] = ..., - ) -> None: ... 
- -class ListStreamsRequest(_message.Message): - __slots__ = ("prefix", "start_after", "limit") - PREFIX_FIELD_NUMBER: _ClassVar[int] - START_AFTER_FIELD_NUMBER: _ClassVar[int] - LIMIT_FIELD_NUMBER: _ClassVar[int] - prefix: str - start_after: str - limit: int - def __init__( - self, - prefix: _Optional[str] = ..., - start_after: _Optional[str] = ..., - limit: _Optional[int] = ..., - ) -> None: ... - -class ListStreamsResponse(_message.Message): - __slots__ = ("streams", "has_more") - STREAMS_FIELD_NUMBER: _ClassVar[int] - HAS_MORE_FIELD_NUMBER: _ClassVar[int] - streams: _containers.RepeatedCompositeFieldContainer[StreamInfo] - has_more: bool - def __init__( - self, - streams: _Optional[_Iterable[_Union[StreamInfo, _Mapping]]] = ..., - has_more: bool = ..., - ) -> None: ... - -class CreateStreamRequest(_message.Message): - __slots__ = ("stream", "config") - STREAM_FIELD_NUMBER: _ClassVar[int] - CONFIG_FIELD_NUMBER: _ClassVar[int] - stream: str - config: StreamConfig - def __init__( - self, - stream: _Optional[str] = ..., - config: _Optional[_Union[StreamConfig, _Mapping]] = ..., - ) -> None: ... - -class CreateStreamResponse(_message.Message): - __slots__ = ("info",) - INFO_FIELD_NUMBER: _ClassVar[int] - info: StreamInfo - def __init__(self, info: _Optional[_Union[StreamInfo, _Mapping]] = ...) -> None: ... - -class DeleteStreamRequest(_message.Message): - __slots__ = ("stream",) - STREAM_FIELD_NUMBER: _ClassVar[int] - stream: str - def __init__(self, stream: _Optional[str] = ...) -> None: ... - -class DeleteStreamResponse(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - -class GetStreamConfigRequest(_message.Message): - __slots__ = ("stream",) - STREAM_FIELD_NUMBER: _ClassVar[int] - stream: str - def __init__(self, stream: _Optional[str] = ...) -> None: ... 
- -class GetStreamConfigResponse(_message.Message): - __slots__ = ("config",) - CONFIG_FIELD_NUMBER: _ClassVar[int] - config: StreamConfig - def __init__( - self, config: _Optional[_Union[StreamConfig, _Mapping]] = ... - ) -> None: ... - -class ReconfigureStreamRequest(_message.Message): - __slots__ = ("stream", "config", "mask") - STREAM_FIELD_NUMBER: _ClassVar[int] - CONFIG_FIELD_NUMBER: _ClassVar[int] - MASK_FIELD_NUMBER: _ClassVar[int] - stream: str - config: StreamConfig - mask: _field_mask_pb2.FieldMask - def __init__( - self, - stream: _Optional[str] = ..., - config: _Optional[_Union[StreamConfig, _Mapping]] = ..., - mask: _Optional[_Union[_field_mask_pb2.FieldMask, _Mapping]] = ..., - ) -> None: ... - -class ReconfigureStreamResponse(_message.Message): - __slots__ = ("config",) - CONFIG_FIELD_NUMBER: _ClassVar[int] - config: StreamConfig - def __init__( - self, config: _Optional[_Union[StreamConfig, _Mapping]] = ... - ) -> None: ... - -class CheckTailRequest(_message.Message): - __slots__ = ("stream",) - STREAM_FIELD_NUMBER: _ClassVar[int] - stream: str - def __init__(self, stream: _Optional[str] = ...) -> None: ... - -class CheckTailResponse(_message.Message): - __slots__ = ("next_seq_num", "last_timestamp") - NEXT_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - LAST_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - next_seq_num: int - last_timestamp: int - def __init__( - self, next_seq_num: _Optional[int] = ..., last_timestamp: _Optional[int] = ... - ) -> None: ... 
- -class AppendInput(_message.Message): - __slots__ = ("stream", "records", "match_seq_num", "fencing_token") - STREAM_FIELD_NUMBER: _ClassVar[int] - RECORDS_FIELD_NUMBER: _ClassVar[int] - MATCH_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - FENCING_TOKEN_FIELD_NUMBER: _ClassVar[int] - stream: str - records: _containers.RepeatedCompositeFieldContainer[AppendRecord] - match_seq_num: int - fencing_token: str - def __init__( - self, - stream: _Optional[str] = ..., - records: _Optional[_Iterable[_Union[AppendRecord, _Mapping]]] = ..., - match_seq_num: _Optional[int] = ..., - fencing_token: _Optional[str] = ..., - ) -> None: ... - -class AppendOutput(_message.Message): - __slots__ = ( - "start_seq_num", - "start_timestamp", - "end_seq_num", - "end_timestamp", - "next_seq_num", - "last_timestamp", - ) - START_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - START_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - END_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - END_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - NEXT_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - LAST_TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - start_seq_num: int - start_timestamp: int - end_seq_num: int - end_timestamp: int - next_seq_num: int - last_timestamp: int - def __init__( - self, - start_seq_num: _Optional[int] = ..., - start_timestamp: _Optional[int] = ..., - end_seq_num: _Optional[int] = ..., - end_timestamp: _Optional[int] = ..., - next_seq_num: _Optional[int] = ..., - last_timestamp: _Optional[int] = ..., - ) -> None: ... - -class AppendRequest(_message.Message): - __slots__ = ("input",) - INPUT_FIELD_NUMBER: _ClassVar[int] - input: AppendInput - def __init__( - self, input: _Optional[_Union[AppendInput, _Mapping]] = ... - ) -> None: ... - -class AppendResponse(_message.Message): - __slots__ = ("output",) - OUTPUT_FIELD_NUMBER: _ClassVar[int] - output: AppendOutput - def __init__( - self, output: _Optional[_Union[AppendOutput, _Mapping]] = ... - ) -> None: ... 
- -class AppendSessionRequest(_message.Message): - __slots__ = ("input",) - INPUT_FIELD_NUMBER: _ClassVar[int] - input: AppendInput - def __init__( - self, input: _Optional[_Union[AppendInput, _Mapping]] = ... - ) -> None: ... - -class AppendSessionResponse(_message.Message): - __slots__ = ("output",) - OUTPUT_FIELD_NUMBER: _ClassVar[int] - output: AppendOutput - def __init__( - self, output: _Optional[_Union[AppendOutput, _Mapping]] = ... - ) -> None: ... - -class ReadOutput(_message.Message): - __slots__ = ("batch", "next_seq_num") - BATCH_FIELD_NUMBER: _ClassVar[int] - NEXT_SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - batch: SequencedRecordBatch - next_seq_num: int - def __init__( - self, - batch: _Optional[_Union[SequencedRecordBatch, _Mapping]] = ..., - next_seq_num: _Optional[int] = ..., - ) -> None: ... - -class ReadRequest(_message.Message): - __slots__ = ( - "stream", - "seq_num", - "timestamp", - "tail_offset", - "limit", - "until", - "clamp", - ) - STREAM_FIELD_NUMBER: _ClassVar[int] - SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - TAIL_OFFSET_FIELD_NUMBER: _ClassVar[int] - LIMIT_FIELD_NUMBER: _ClassVar[int] - UNTIL_FIELD_NUMBER: _ClassVar[int] - CLAMP_FIELD_NUMBER: _ClassVar[int] - stream: str - seq_num: int - timestamp: int - tail_offset: int - limit: ReadLimit - until: int - clamp: bool - def __init__( - self, - stream: _Optional[str] = ..., - seq_num: _Optional[int] = ..., - timestamp: _Optional[int] = ..., - tail_offset: _Optional[int] = ..., - limit: _Optional[_Union[ReadLimit, _Mapping]] = ..., - until: _Optional[int] = ..., - clamp: bool = ..., - ) -> None: ... - -class ReadResponse(_message.Message): - __slots__ = ("output",) - OUTPUT_FIELD_NUMBER: _ClassVar[int] - output: ReadOutput - def __init__( - self, output: _Optional[_Union[ReadOutput, _Mapping]] = ... - ) -> None: ... 
- -class ReadLimit(_message.Message): - __slots__ = ("count", "bytes") - COUNT_FIELD_NUMBER: _ClassVar[int] - BYTES_FIELD_NUMBER: _ClassVar[int] - count: int - bytes: int - def __init__( - self, count: _Optional[int] = ..., bytes: _Optional[int] = ... - ) -> None: ... - -class ReadSessionRequest(_message.Message): - __slots__ = ( - "stream", - "seq_num", - "timestamp", - "tail_offset", - "limit", - "heartbeats", - "until", - "clamp", - ) - STREAM_FIELD_NUMBER: _ClassVar[int] - SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - TAIL_OFFSET_FIELD_NUMBER: _ClassVar[int] - LIMIT_FIELD_NUMBER: _ClassVar[int] - HEARTBEATS_FIELD_NUMBER: _ClassVar[int] - UNTIL_FIELD_NUMBER: _ClassVar[int] - CLAMP_FIELD_NUMBER: _ClassVar[int] - stream: str - seq_num: int - timestamp: int - tail_offset: int - limit: ReadLimit - heartbeats: bool - until: int - clamp: bool - def __init__( - self, - stream: _Optional[str] = ..., - seq_num: _Optional[int] = ..., - timestamp: _Optional[int] = ..., - tail_offset: _Optional[int] = ..., - limit: _Optional[_Union[ReadLimit, _Mapping]] = ..., - heartbeats: bool = ..., - until: _Optional[int] = ..., - clamp: bool = ..., - ) -> None: ... - -class ReadSessionResponse(_message.Message): - __slots__ = ("output",) - OUTPUT_FIELD_NUMBER: _ClassVar[int] - output: ReadOutput - def __init__( - self, output: _Optional[_Union[ReadOutput, _Mapping]] = ... - ) -> None: ... - -class StreamConfig(_message.Message): - __slots__ = ("storage_class", "age", "infinite", "timestamping", "delete_on_empty") - class Timestamping(_message.Message): - __slots__ = ("mode", "uncapped") - MODE_FIELD_NUMBER: _ClassVar[int] - UNCAPPED_FIELD_NUMBER: _ClassVar[int] - mode: TimestampingMode - uncapped: bool - def __init__( - self, - mode: _Optional[_Union[TimestampingMode, str]] = ..., - uncapped: bool = ..., - ) -> None: ... 
- - class DeleteOnEmpty(_message.Message): - __slots__ = ("min_age_secs",) - MIN_AGE_SECS_FIELD_NUMBER: _ClassVar[int] - min_age_secs: int - def __init__(self, min_age_secs: _Optional[int] = ...) -> None: ... - - class InfiniteRetention(_message.Message): - __slots__ = () - def __init__(self) -> None: ... - - STORAGE_CLASS_FIELD_NUMBER: _ClassVar[int] - AGE_FIELD_NUMBER: _ClassVar[int] - INFINITE_FIELD_NUMBER: _ClassVar[int] - TIMESTAMPING_FIELD_NUMBER: _ClassVar[int] - DELETE_ON_EMPTY_FIELD_NUMBER: _ClassVar[int] - storage_class: StorageClass - age: int - infinite: StreamConfig.InfiniteRetention - timestamping: StreamConfig.Timestamping - delete_on_empty: StreamConfig.DeleteOnEmpty - def __init__( - self, - storage_class: _Optional[_Union[StorageClass, str]] = ..., - age: _Optional[int] = ..., - infinite: _Optional[_Union[StreamConfig.InfiniteRetention, _Mapping]] = ..., - timestamping: _Optional[_Union[StreamConfig.Timestamping, _Mapping]] = ..., - delete_on_empty: _Optional[_Union[StreamConfig.DeleteOnEmpty, _Mapping]] = ..., - ) -> None: ... - -class BasinConfig(_message.Message): - __slots__ = ( - "default_stream_config", - "create_stream_on_append", - "create_stream_on_read", - ) - DEFAULT_STREAM_CONFIG_FIELD_NUMBER: _ClassVar[int] - CREATE_STREAM_ON_APPEND_FIELD_NUMBER: _ClassVar[int] - CREATE_STREAM_ON_READ_FIELD_NUMBER: _ClassVar[int] - default_stream_config: StreamConfig - create_stream_on_append: bool - create_stream_on_read: bool - def __init__( - self, - default_stream_config: _Optional[_Union[StreamConfig, _Mapping]] = ..., - create_stream_on_append: bool = ..., - create_stream_on_read: bool = ..., - ) -> None: ... 
- -class BasinInfo(_message.Message): - __slots__ = ("name", "scope", "state") - NAME_FIELD_NUMBER: _ClassVar[int] - SCOPE_FIELD_NUMBER: _ClassVar[int] - STATE_FIELD_NUMBER: _ClassVar[int] - name: str - scope: BasinScope - state: BasinState - def __init__( - self, - name: _Optional[str] = ..., - scope: _Optional[_Union[BasinScope, str]] = ..., - state: _Optional[_Union[BasinState, str]] = ..., - ) -> None: ... - -class Header(_message.Message): - __slots__ = ("name", "value") - NAME_FIELD_NUMBER: _ClassVar[int] - VALUE_FIELD_NUMBER: _ClassVar[int] - name: bytes - value: bytes - def __init__( - self, name: _Optional[bytes] = ..., value: _Optional[bytes] = ... - ) -> None: ... - -class AppendRecord(_message.Message): - __slots__ = ("timestamp", "headers", "body") - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - HEADERS_FIELD_NUMBER: _ClassVar[int] - BODY_FIELD_NUMBER: _ClassVar[int] - timestamp: int - headers: _containers.RepeatedCompositeFieldContainer[Header] - body: bytes - def __init__( - self, - timestamp: _Optional[int] = ..., - headers: _Optional[_Iterable[_Union[Header, _Mapping]]] = ..., - body: _Optional[bytes] = ..., - ) -> None: ... - -class SequencedRecord(_message.Message): - __slots__ = ("seq_num", "timestamp", "headers", "body") - SEQ_NUM_FIELD_NUMBER: _ClassVar[int] - TIMESTAMP_FIELD_NUMBER: _ClassVar[int] - HEADERS_FIELD_NUMBER: _ClassVar[int] - BODY_FIELD_NUMBER: _ClassVar[int] - seq_num: int - timestamp: int - headers: _containers.RepeatedCompositeFieldContainer[Header] - body: bytes - def __init__( - self, - seq_num: _Optional[int] = ..., - timestamp: _Optional[int] = ..., - headers: _Optional[_Iterable[_Union[Header, _Mapping]]] = ..., - body: _Optional[bytes] = ..., - ) -> None: ... 
- -class SequencedRecordBatch(_message.Message): - __slots__ = ("records",) - RECORDS_FIELD_NUMBER: _ClassVar[int] - records: _containers.RepeatedCompositeFieldContainer[SequencedRecord] - def __init__( - self, records: _Optional[_Iterable[_Union[SequencedRecord, _Mapping]]] = ... - ) -> None: ... diff --git a/src/streamstore/_lib/s2/v1alpha/s2_pb2_grpc.py b/src/streamstore/_lib/s2/v1alpha/s2_pb2_grpc.py deleted file mode 100644 index bc2f816..0000000 --- a/src/streamstore/_lib/s2/v1alpha/s2_pb2_grpc.py +++ /dev/null @@ -1,980 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -"""Client and server classes corresponding to protobuf-defined services.""" - -import grpc - -from streamstore._lib.s2.v1alpha import s2_pb2 as s2_dot_v1alpha_dot_s2__pb2 - -GRPC_GENERATED_VERSION = "1.69.0" -GRPC_VERSION = grpc.__version__ -_version_not_supported = False - -try: - from grpc._utilities import first_version_is_lower - - _version_not_supported = first_version_is_lower( - GRPC_VERSION, GRPC_GENERATED_VERSION - ) -except ImportError: - _version_not_supported = True - -if _version_not_supported: - raise RuntimeError( - f"The grpc package installed is at version {GRPC_VERSION}," - + " but the generated code in s2/v1alpha/s2_pb2_grpc.py depends on" - + f" grpcio>={GRPC_GENERATED_VERSION}." - + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" - + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." - ) - - -class AccountServiceStub(object): - """Operate on an S2 account.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListBasins = channel.unary_unary( - "/s2.v1alpha.AccountService/ListBasins", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ListBasinsRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ListBasinsResponse.FromString, - _registered_method=True, - ) - self.CreateBasin = channel.unary_unary( - "/s2.v1alpha.AccountService/CreateBasin", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.CreateBasinRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.CreateBasinResponse.FromString, - _registered_method=True, - ) - self.DeleteBasin = channel.unary_unary( - "/s2.v1alpha.AccountService/DeleteBasin", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.DeleteBasinRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.DeleteBasinResponse.FromString, - _registered_method=True, - ) - self.ReconfigureBasin = channel.unary_unary( - "/s2.v1alpha.AccountService/ReconfigureBasin", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureBasinRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureBasinResponse.FromString, - _registered_method=True, - ) - self.GetBasinConfig = channel.unary_unary( - "/s2.v1alpha.AccountService/GetBasinConfig", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.GetBasinConfigRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.GetBasinConfigResponse.FromString, - _registered_method=True, - ) - self.IssueAccessToken = channel.unary_unary( - "/s2.v1alpha.AccountService/IssueAccessToken", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.IssueAccessTokenRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.IssueAccessTokenResponse.FromString, - _registered_method=True, - ) - self.RevokeAccessToken = channel.unary_unary( - "/s2.v1alpha.AccountService/RevokeAccessToken", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.RevokeAccessTokenRequest.SerializeToString, - 
response_deserializer=s2_dot_v1alpha_dot_s2__pb2.RevokeAccessTokenResponse.FromString, - _registered_method=True, - ) - self.ListAccessTokens = channel.unary_unary( - "/s2.v1alpha.AccountService/ListAccessTokens", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ListAccessTokensRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ListAccessTokensResponse.FromString, - _registered_method=True, - ) - - -class AccountServiceServicer(object): - """Operate on an S2 account.""" - - def ListBasins(self, request, context): - """List basins.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateBasin(self, request, context): - """Create a new basin. - Provide a client request token with the `S2-Request-Token` header for idempotent retry behaviour. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteBasin(self, request, context): - """Delete a basin. - Basin deletion is asynchronous, and may take a few minutes to complete. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ReconfigureBasin(self, request, context): - """Update basin configuration.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetBasinConfig(self, request, context): - """Get basin configuration.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def IssueAccessToken(self, request, context): - """Issue a new access token.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def RevokeAccessToken(self, request, context): - """Revoke an access token.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListAccessTokens(self, request, context): - """List access tokens.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_AccountServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListBasins": grpc.unary_unary_rpc_method_handler( - servicer.ListBasins, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ListBasinsRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ListBasinsResponse.SerializeToString, - ), - "CreateBasin": grpc.unary_unary_rpc_method_handler( - servicer.CreateBasin, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.CreateBasinRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.CreateBasinResponse.SerializeToString, - ), - "DeleteBasin": grpc.unary_unary_rpc_method_handler( - 
servicer.DeleteBasin, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.DeleteBasinRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.DeleteBasinResponse.SerializeToString, - ), - "ReconfigureBasin": grpc.unary_unary_rpc_method_handler( - servicer.ReconfigureBasin, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureBasinRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureBasinResponse.SerializeToString, - ), - "GetBasinConfig": grpc.unary_unary_rpc_method_handler( - servicer.GetBasinConfig, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.GetBasinConfigRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.GetBasinConfigResponse.SerializeToString, - ), - "IssueAccessToken": grpc.unary_unary_rpc_method_handler( - servicer.IssueAccessToken, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.IssueAccessTokenRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.IssueAccessTokenResponse.SerializeToString, - ), - "RevokeAccessToken": grpc.unary_unary_rpc_method_handler( - servicer.RevokeAccessToken, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.RevokeAccessTokenRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.RevokeAccessTokenResponse.SerializeToString, - ), - "ListAccessTokens": grpc.unary_unary_rpc_method_handler( - servicer.ListAccessTokens, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ListAccessTokensRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ListAccessTokensResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "s2.v1alpha.AccountService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers( - "s2.v1alpha.AccountService", rpc_method_handlers - ) - - -# This class is part of an EXPERIMENTAL API. 
-class AccountService(object): - """Operate on an S2 account.""" - - @staticmethod - def ListBasins( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/ListBasins", - s2_dot_v1alpha_dot_s2__pb2.ListBasinsRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ListBasinsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def CreateBasin( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/CreateBasin", - s2_dot_v1alpha_dot_s2__pb2.CreateBasinRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.CreateBasinResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def DeleteBasin( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/DeleteBasin", - s2_dot_v1alpha_dot_s2__pb2.DeleteBasinRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.DeleteBasinResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def ReconfigureBasin( - request, - target, - options=(), - channel_credentials=None, - 
call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/ReconfigureBasin", - s2_dot_v1alpha_dot_s2__pb2.ReconfigureBasinRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ReconfigureBasinResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def GetBasinConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/GetBasinConfig", - s2_dot_v1alpha_dot_s2__pb2.GetBasinConfigRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.GetBasinConfigResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def IssueAccessToken( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/IssueAccessToken", - s2_dot_v1alpha_dot_s2__pb2.IssueAccessTokenRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.IssueAccessTokenResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def RevokeAccessToken( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, 
- ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/RevokeAccessToken", - s2_dot_v1alpha_dot_s2__pb2.RevokeAccessTokenRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.RevokeAccessTokenResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def ListAccessTokens( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.AccountService/ListAccessTokens", - s2_dot_v1alpha_dot_s2__pb2.ListAccessTokensRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ListAccessTokensResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - -class BasinServiceStub(object): - """Operate on an S2 basin.""" - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListStreams = channel.unary_unary( - "/s2.v1alpha.BasinService/ListStreams", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ListStreamsRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ListStreamsResponse.FromString, - _registered_method=True, - ) - self.CreateStream = channel.unary_unary( - "/s2.v1alpha.BasinService/CreateStream", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.CreateStreamRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.CreateStreamResponse.FromString, - _registered_method=True, - ) - self.DeleteStream = channel.unary_unary( - "/s2.v1alpha.BasinService/DeleteStream", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.DeleteStreamRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.DeleteStreamResponse.FromString, - _registered_method=True, - ) - self.GetStreamConfig = channel.unary_unary( - "/s2.v1alpha.BasinService/GetStreamConfig", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.GetStreamConfigRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.GetStreamConfigResponse.FromString, - _registered_method=True, - ) - self.ReconfigureStream = channel.unary_unary( - "/s2.v1alpha.BasinService/ReconfigureStream", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureStreamRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureStreamResponse.FromString, - _registered_method=True, - ) - - -class BasinServiceServicer(object): - """Operate on an S2 basin.""" - - def ListStreams(self, request, context): - """List streams.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateStream(self, request, context): - """Create a stream. - Provide a client request token with the `S2-Request-Token` header for idempotent retry behaviour. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteStream(self, request, context): - """Delete a stream. - Stream deletion is asynchronous, and may take a few minutes to complete. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetStreamConfig(self, request, context): - """Get stream configuration.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ReconfigureStream(self, request, context): - """Update stream configuration.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_BasinServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "ListStreams": grpc.unary_unary_rpc_method_handler( - servicer.ListStreams, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ListStreamsRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ListStreamsResponse.SerializeToString, - ), - "CreateStream": grpc.unary_unary_rpc_method_handler( - servicer.CreateStream, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.CreateStreamRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.CreateStreamResponse.SerializeToString, - ), - "DeleteStream": grpc.unary_unary_rpc_method_handler( - servicer.DeleteStream, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.DeleteStreamRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.DeleteStreamResponse.SerializeToString, - ), - "GetStreamConfig": grpc.unary_unary_rpc_method_handler( - servicer.GetStreamConfig, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.GetStreamConfigRequest.FromString, - 
response_serializer=s2_dot_v1alpha_dot_s2__pb2.GetStreamConfigResponse.SerializeToString, - ), - "ReconfigureStream": grpc.unary_unary_rpc_method_handler( - servicer.ReconfigureStream, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureStreamRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ReconfigureStreamResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "s2.v1alpha.BasinService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers( - "s2.v1alpha.BasinService", rpc_method_handlers - ) - - -# This class is part of an EXPERIMENTAL API. -class BasinService(object): - """Operate on an S2 basin.""" - - @staticmethod - def ListStreams( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.BasinService/ListStreams", - s2_dot_v1alpha_dot_s2__pb2.ListStreamsRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ListStreamsResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def CreateStream( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.BasinService/CreateStream", - s2_dot_v1alpha_dot_s2__pb2.CreateStreamRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.CreateStreamResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def 
DeleteStream( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.BasinService/DeleteStream", - s2_dot_v1alpha_dot_s2__pb2.DeleteStreamRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.DeleteStreamResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def GetStreamConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.BasinService/GetStreamConfig", - s2_dot_v1alpha_dot_s2__pb2.GetStreamConfigRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.GetStreamConfigResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def ReconfigureStream( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.BasinService/ReconfigureStream", - s2_dot_v1alpha_dot_s2__pb2.ReconfigureStreamRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ReconfigureStreamResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - -class StreamServiceStub(object): - """Operate on an S2 stream.""" - - def __init__(self, channel): - """Constructor. 
- - Args: - channel: A grpc.Channel. - """ - self.CheckTail = channel.unary_unary( - "/s2.v1alpha.StreamService/CheckTail", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.CheckTailRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.CheckTailResponse.FromString, - _registered_method=True, - ) - self.Append = channel.unary_unary( - "/s2.v1alpha.StreamService/Append", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.AppendRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.AppendResponse.FromString, - _registered_method=True, - ) - self.AppendSession = channel.stream_stream( - "/s2.v1alpha.StreamService/AppendSession", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.AppendSessionRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.AppendSessionResponse.FromString, - _registered_method=True, - ) - self.Read = channel.unary_unary( - "/s2.v1alpha.StreamService/Read", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ReadRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReadResponse.FromString, - _registered_method=True, - ) - self.ReadSession = channel.unary_stream( - "/s2.v1alpha.StreamService/ReadSession", - request_serializer=s2_dot_v1alpha_dot_s2__pb2.ReadSessionRequest.SerializeToString, - response_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReadSessionResponse.FromString, - _registered_method=True, - ) - - -class StreamServiceServicer(object): - """Operate on an S2 stream.""" - - def CheckTail(self, request, context): - """Check the tail of the stream.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Append(self, request, context): - """Append a batch of records to a stream.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def 
AppendSession(self, request_iterator, context): - """Append batches of records to a stream continuously, while guaranteeing pipelined requests are processed in order. - If any request fails, the session is terminated. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Read(self, request, context): - """Retrieve a batch of records from a stream.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ReadSession(self, request, context): - """Retrieve batches of records from a stream continuously.""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_StreamServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "CheckTail": grpc.unary_unary_rpc_method_handler( - servicer.CheckTail, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.CheckTailRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.CheckTailResponse.SerializeToString, - ), - "Append": grpc.unary_unary_rpc_method_handler( - servicer.Append, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.AppendRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.AppendResponse.SerializeToString, - ), - "AppendSession": grpc.stream_stream_rpc_method_handler( - servicer.AppendSession, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.AppendSessionRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.AppendSessionResponse.SerializeToString, - ), - "Read": grpc.unary_unary_rpc_method_handler( - servicer.Read, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReadRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ReadResponse.SerializeToString, - ), - "ReadSession": grpc.unary_stream_rpc_method_handler( - 
servicer.ReadSession, - request_deserializer=s2_dot_v1alpha_dot_s2__pb2.ReadSessionRequest.FromString, - response_serializer=s2_dot_v1alpha_dot_s2__pb2.ReadSessionResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "s2.v1alpha.StreamService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - server.add_registered_method_handlers( - "s2.v1alpha.StreamService", rpc_method_handlers - ) - - -# This class is part of an EXPERIMENTAL API. -class StreamService(object): - """Operate on an S2 stream.""" - - @staticmethod - def CheckTail( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.StreamService/CheckTail", - s2_dot_v1alpha_dot_s2__pb2.CheckTailRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.CheckTailResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def Append( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.StreamService/Append", - s2_dot_v1alpha_dot_s2__pb2.AppendRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.AppendResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def AppendSession( - request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): 
- return grpc.experimental.stream_stream( - request_iterator, - target, - "/s2.v1alpha.StreamService/AppendSession", - s2_dot_v1alpha_dot_s2__pb2.AppendSessionRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.AppendSessionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def Read( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/s2.v1alpha.StreamService/Read", - s2_dot_v1alpha_dot_s2__pb2.ReadRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ReadResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) - - @staticmethod - def ReadSession( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_stream( - request, - target, - "/s2.v1alpha.StreamService/ReadSession", - s2_dot_v1alpha_dot_s2__pb2.ReadSessionRequest.SerializeToString, - s2_dot_v1alpha_dot_s2__pb2.ReadSessionResponse.FromString, - options, - channel_credentials, - insecure, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - _registered_method=True, - ) diff --git a/src/streamstore/_mappers.py b/src/streamstore/_mappers.py deleted file mode 100644 index 0a97393..0000000 --- a/src/streamstore/_mappers.py +++ /dev/null @@ -1,361 +0,0 @@ -from datetime import datetime -from typing import Literal, cast - -from google.protobuf.internal.containers import RepeatedCompositeFieldContainer - -import streamstore._lib.s2.v1alpha.s2_pb2 as msgs -from 
streamstore.schemas import ( - AccessTokenInfo, - AccessTokenScope, - AppendInput, - AppendOutput, - BasinConfig, - BasinInfo, - BasinScope, - BasinState, - Operation, - OperationGroupPermissions, - Permission, - ReadLimit, - Record, - ResourceMatchOp, - ResourceMatchRule, - SeqNum, - SequencedRecord, - StorageClass, - StreamConfig, - StreamInfo, - TailOffset, - Timestamp, - Timestamping, - TimestampingMode, -) - -_ReadStart = SeqNum | Timestamp | TailOffset - - -def append_record_message(record: Record) -> msgs.AppendRecord: - headers = [msgs.Header(name=name, value=value) for (name, value) in record.headers] - return msgs.AppendRecord( - timestamp=record.timestamp, headers=headers, body=record.body - ) - - -def append_input_message(stream: str, input: AppendInput) -> msgs.AppendInput: - records = [append_record_message(r) for r in input.records] - return msgs.AppendInput( - stream=stream, - records=records, - match_seq_num=input.match_seq_num, - fencing_token=input.fencing_token, - ) - - -def read_request_message( - stream: str, - start: _ReadStart, - limit: ReadLimit | None, - until: int | None, -) -> msgs.ReadRequest: - seq_num, timestamp, tail_offset = _read_start_pos(start) - return msgs.ReadRequest( - stream=stream, - seq_num=seq_num, - timestamp=timestamp, - tail_offset=tail_offset, - limit=_read_limit_message(limit), - until=until, - ) - - -def read_session_request_message( - stream: str, - start: _ReadStart, - limit: ReadLimit | None, - until: int | None, - clamp: bool = False, -) -> msgs.ReadSessionRequest: - seq_num, timestamp, tail_offset = _read_start_pos(start) - return msgs.ReadSessionRequest( - stream=stream, - seq_num=seq_num, - timestamp=timestamp, - tail_offset=tail_offset, - limit=_read_limit_message(limit), - until=until, - clamp=clamp, - ) - - -def _read_start_pos(start: _ReadStart) -> tuple[int | None, int | None, int | None]: - seq_num = None - timestamp = None - tail_offset = None - if isinstance(start, SeqNum): - seq_num = start.value - 
elif isinstance(start, Timestamp): - timestamp = start.value - elif isinstance(start, TailOffset): - tail_offset = start.value - else: - raise ValueError("start doesn't match any of the expected types") - return ( - seq_num, - timestamp, - tail_offset, - ) - - -def basin_info_schema(info: msgs.BasinInfo) -> BasinInfo: - return BasinInfo(info.name, BasinScope(info.scope), BasinState(info.state)) - - -def stream_info_schema(info: msgs.StreamInfo) -> StreamInfo: - return StreamInfo( - info.name, - datetime.fromtimestamp(info.created_at), - datetime.fromtimestamp(info.deleted_at) if info.deleted_at != 0 else None, - ) - - -def stream_config_message( - config: StreamConfig | None = None, - return_mask_paths: bool = False, - mask_path_prefix: str = "", -) -> msgs.StreamConfig | tuple[msgs.StreamConfig, list[str]]: - paths = [] - stream_config = msgs.StreamConfig() - if config: - storage_class = config.storage_class - retention_policy = config.retention_policy - timestamping = config.timestamping - delete_on_empty_min_age = config.delete_on_empty_min_age - if storage_class is not None: - paths.append(f"{mask_path_prefix}storage_class") - stream_config.storage_class = storage_class.value - if retention_policy is not None: - paths.append(f"{mask_path_prefix}retention_policy") - if retention_policy == "infinite": - stream_config.infinite.CopyFrom(msgs.StreamConfig.InfiniteRetention()) - else: - stream_config.age = retention_policy - if timestamping is not None: - paths.append(f"{mask_path_prefix}timestamping") - if timestamping.mode is not None: - paths.append(f"{mask_path_prefix}timestamping.mode") - stream_config.timestamping.mode = timestamping.mode.value - if timestamping.uncapped is not None: - paths.append(f"{mask_path_prefix}timestamping.uncapped") - stream_config.timestamping.uncapped = timestamping.uncapped - if delete_on_empty_min_age is not None: - paths.append(f"{mask_path_prefix}delete_on_empty.min_age_secs") - stream_config.delete_on_empty.min_age_secs = 
delete_on_empty_min_age - if return_mask_paths: - return (stream_config, paths) - return stream_config - - -def basin_config_message( - config: BasinConfig | None = None, - return_mask_paths: bool = False, -) -> msgs.BasinConfig | tuple[msgs.BasinConfig, list[str]]: - paths = [] - basin_config = msgs.BasinConfig() - if config: - if return_mask_paths: - default_stream_config, deep_paths = cast( - tuple[msgs.StreamConfig, list[str]], - stream_config_message( - config.default_stream_config, - return_mask_paths, - mask_path_prefix="default_stream_config.", - ), - ) - paths.extend(deep_paths) - else: - default_stream_config = cast( - msgs.StreamConfig, stream_config_message(config.default_stream_config) - ) - basin_config.default_stream_config.CopyFrom(default_stream_config) - if config.create_stream_on_append is not None: - basin_config.create_stream_on_append = config.create_stream_on_append - paths.append("create_stream_on_append") - if return_mask_paths: - return (basin_config, paths) - return basin_config - - -def stream_config_schema(config: msgs.StreamConfig) -> StreamConfig: - retention_policy: int | Literal["infinite"] - match config.WhichOneof("retention_policy"): - case "age": - retention_policy = config.age - case "infinite": - retention_policy = "infinite" - case _: - raise RuntimeError( - "StreamConfig retention_policy doesn't match any of the expected values" - ) - return StreamConfig( - StorageClass(config.storage_class), - retention_policy, - Timestamping( - mode=TimestampingMode(config.timestamping.mode), - uncapped=config.timestamping.uncapped, - ), - config.delete_on_empty.min_age_secs, - ) - - -def basin_config_schema(config: msgs.BasinConfig) -> BasinConfig: - return BasinConfig( - stream_config_schema(config.default_stream_config), - config.create_stream_on_append, - ) - - -def append_output_schema(output: msgs.AppendOutput) -> AppendOutput: - return AppendOutput( - output.start_seq_num, - output.start_timestamp, - output.end_seq_num, - 
output.end_timestamp, - output.next_seq_num, - output.last_timestamp, - ) - - -def sequenced_records_schema( - batch: msgs.SequencedRecordBatch, ignore_command_records: bool = False -) -> list[SequencedRecord]: - if ignore_command_records: - return [ - SequencedRecord( - sr.seq_num, - sr.body, - [(h.name, h.value) for h in sr.headers], - sr.timestamp, - ) - for sr in batch.records - if _not_a_command_record(sr.headers) - ] - return [ - SequencedRecord( - sr.seq_num, sr.body, [(h.name, h.value) for h in sr.headers], sr.timestamp - ) - for sr in batch.records - ] - - -def access_token_info_message( - id: str, scope: AccessTokenScope, auto_prefix_streams: bool, expires_at: int | None -) -> msgs.AccessTokenInfo: - def resource_set(rule: ResourceMatchRule | None) -> msgs.ResourceSet | None: - if rule is None: - return None - match rule.match_op: - case ResourceMatchOp.EXACT: - return msgs.ResourceSet(exact=rule.value) - case ResourceMatchOp.PREFIX: - return msgs.ResourceSet(prefix=rule.value) - case _: - raise ValueError( - "ResourceMatchOp doesn't match any of the expected values" - ) - - def permissions(perm: Permission) -> msgs.ReadWritePermissions: - read = False - write = False - match perm: - case Permission.UNSPECIFIED: - pass - case Permission.READ: - read = True - case Permission.WRITE: - write = True - case Permission.READ_WRITE: - read = True - write = True - return msgs.ReadWritePermissions(read=read, write=write) - - def permitted_op_groups( - op_group_perms: OperationGroupPermissions | None, - ) -> msgs.PermittedOperationGroups | None: - if op_group_perms is None: - return None - return msgs.PermittedOperationGroups( - account=permissions(op_group_perms.account), - basin=permissions(op_group_perms.basin), - stream=permissions(op_group_perms.stream), - ) - - return msgs.AccessTokenInfo( - id=id, - expires_at=expires_at, - auto_prefix_streams=auto_prefix_streams, - scope=msgs.AccessTokenScope( - basins=resource_set(scope.basins), - 
streams=resource_set(scope.streams), - access_tokens=resource_set(scope.access_tokens), - op_groups=permitted_op_groups(scope.op_group_perms), - ops=(op.value for op in scope.ops), - ), - ) - - -def access_token_info_schema(info: msgs.AccessTokenInfo) -> AccessTokenInfo: - def resource_match_rule(resource_set: msgs.ResourceSet) -> ResourceMatchRule | None: - if not resource_set.HasField("matching"): - return None - match resource_set.WhichOneof("matching"): - case "exact": - return ResourceMatchRule(ResourceMatchOp.EXACT, resource_set.exact) - case "prefix": - return ResourceMatchRule(ResourceMatchOp.PREFIX, resource_set.prefix) - case _: - raise RuntimeError( - "ResourceSet matching doesn't match any of the expected values" - ) - - def permission(perms: msgs.ReadWritePermissions) -> Permission: - if perms.read and perms.write: - return Permission.READ_WRITE - elif perms.read: - return Permission.READ - elif perms.write: - return Permission.WRITE - else: - return Permission.UNSPECIFIED - - return AccessTokenInfo( - id=info.id, - scope=AccessTokenScope( - basins=resource_match_rule(info.scope.basins), - streams=resource_match_rule(info.scope.streams), - access_tokens=resource_match_rule(info.scope.access_tokens), - op_group_perms=OperationGroupPermissions( - account=permission(info.scope.op_groups.account), - basin=permission(info.scope.op_groups.basin), - stream=permission(info.scope.op_groups.stream), - ), - ops=[Operation(op) for op in info.scope.ops], - ), - auto_prefix_streams=info.auto_prefix_streams, - expires_at=info.expires_at if info.HasField("expires_at") else None, - ) - - -def _read_limit_message(limit: ReadLimit | None) -> msgs.ReadLimit: - return ( - msgs.ReadLimit(count=limit.count, bytes=limit.bytes) - if limit - else msgs.ReadLimit() - ) - - -def _not_a_command_record( - headers: RepeatedCompositeFieldContainer[msgs.Header], -) -> bool: - if len(headers) == 1 and headers[0].name == b"": - return False - return True diff --git 
a/src/streamstore/schemas.py b/src/streamstore/schemas.py deleted file mode 100644 index 34ad5ee..0000000 --- a/src/streamstore/schemas.py +++ /dev/null @@ -1,509 +0,0 @@ -__all__ = [ - "Record", - "AppendInput", - "AppendOutput", - "Tail", - "SeqNum", - "Timestamp", - "TailOffset", - "ReadLimit", - "SequencedRecord", - "FirstSeqNum", - "NextSeqNum", - "Page", - "BasinScope", - "BasinState", - "BasinInfo", - "StreamInfo", - "StorageClass", - "TimestampingMode", - "Timestamping", - "StreamConfig", - "BasinConfig", - "ResourceMatchOp", - "ResourceMatchRule", - "Permission", - "OperationGroupPermissions", - "Operation", - "AccessTokenScope", - "AccessTokenInfo", - "Cloud", - "Endpoints", -] - -import os -from dataclasses import dataclass, field -from datetime import datetime -from enum import Enum -from typing import Generic, Literal, TypeVar - -from streamstore._exceptions import fallible - -T = TypeVar("T") - -ONE_MIB = 1024 * 1024 - - -class DocEnum(Enum): - def __new__(cls, value, doc=None): - self = object.__new__(cls) - self._value_ = value - if doc is not None: - self.__doc__ = doc - return self - - -@dataclass(slots=True) -class Record: - """ - Record to be appended to a stream. - """ - - #: Body of this record. - body: bytes - #: Series of name-value pairs for this record. - headers: list[tuple[bytes, bytes]] = field(default_factory=list) - #: Timestamp for this record. - #: - #: Precise semantics depend on :attr:`.StreamConfig.timestamping`. - timestamp: int | None = None - - -@dataclass(slots=True) -class AppendInput: - """ - Used in the parameters to :meth:`.Stream.append` and :meth:`.Stream.append_session`. - """ - - #: Batch of records to append atomically, which must contain at least one record, - #: and no more than 1000. The size of the batch must not exceed 1MiB of :func:`.metered_bytes`. - records: list[Record] - #: Enforce that the sequence number issued to the first record in the batch matches this value. 
- match_seq_num: int | None = None - #: Enforce a fencing token, which must have been previously set by a ``fence`` command record. - fencing_token: str | None = None - - -@dataclass(slots=True) -class AppendOutput: - """ - Returned from :meth:`.Stream.append`. - - (or) - - Yielded from :meth:`.Stream.append_session`. - """ - - #: Sequence number of the first appended record. - start_seq_num: int - #: Timestamp of the first appended record. - start_timestamp: int - #: Sequence number of the last appended record + 1. - #: ``end_seq_num - start_seq_num`` will be the number of records in the batch. - end_seq_num: int - #: Timestamp of the last appended record. - end_timestamp: int - #: Sequence number of the last durable record on the stream + 1. - #: This can be greater than ``end_seq_num`` in case of concurrent appends. - next_seq_num: int - #: Timestamp of the last durable record on the stream. - last_timestamp: int - - -@dataclass(slots=True) -class Tail: - """ - Tail of a stream. - """ - - #: Sequence number of the last durable record on the stream + 1. - next_seq_num: int - #: Timestamp of the last durable record on the stream. - last_timestamp: int - - -@dataclass(slots=True) -class ReadLimit: - """ - Used in the parameters to :meth:`.Stream.read` and :meth:`.Stream.read_session`. - - If both ``count`` and ``bytes`` are specified, either limit may be hit. - """ - - #: Number of records. - count: int | None = None - #: Cumulative size of records calculated using :func:`.metered_bytes`. - bytes: int | None = None - - -@dataclass(slots=True) -class SequencedRecord: - """ - Record read from a stream. - """ - - #: Sequence number assigned to this record. - seq_num: int - #: Body of this record. - body: bytes - #: Series of name-value pairs for this record. - headers: list[tuple[bytes, bytes]] - #: Timestamp for this record. 
- timestamp: int - - -@dataclass(slots=True) -class SeqNum: - value: int - - -@dataclass(slots=True) -class Timestamp: - value: int - - -@dataclass(slots=True) -class TailOffset: - """ - Number of records before the tail. - """ - - value: int - - -@dataclass(slots=True) -class FirstSeqNum: - value: int - - -@dataclass(slots=True) -class NextSeqNum: - value: int - - -@dataclass(slots=True) -class Page(Generic[T]): - """ - Page of items. - """ - - #: List of items of any type T. - items: list[T] - #: If ``True``, it means that there are more pages. - has_more: bool - - -class BasinScope(DocEnum): - """ - Scope of a basin. - """ - - UNSPECIFIED = 0, "``UNSPECIFIED`` defaults to ``AWS_US_EAST_1``." - AWS_US_EAST_1 = 1, "AWS ``us-east-1`` region." - - -class BasinState(DocEnum): - """ - Current state of a basin. - """ - - UNSPECIFIED = 0 - ACTIVE = 1 - CREATING = 2 - DELETING = 3 - - -@dataclass(slots=True) -class BasinInfo: - """ - Basin information. - """ - - #: Basin name. - name: str - #: Basin scope. - scope: BasinScope - #: Basin state. - state: BasinState - - -@dataclass(slots=True) -class StreamInfo: - """ - Stream information. - """ - - #: Stream name. - name: str - #: Creation time. - created_at: datetime - #: Deletion time, if this stream is being deleted. - deleted_at: datetime | None - - -class StorageClass(DocEnum): - """ - Storage class for recent appends. - """ - - STANDARD = 1, "Offers end-to-end latencies under 500 ms." - EXPRESS = 2, "Offers end-to-end latencies under 50 ms." - - -class TimestampingMode(DocEnum): - """ - Timestamping mode. - - Note: - The arrival time is always in milliseconds since Unix epoch. - """ - - UNSPECIFIED = 0, "Defaults to ``CLIENT_PREFER``." 
- CLIENT_PREFER = ( - 1, - "Prefer client-specified timestamp if present, otherwise use arrival time.", - ) - CLIENT_REQUIRE = ( - 2, - "Require a client-specified timestamp and reject the append if it is absent.", - ) - ARRIVAL = 3, "Use the arrival time and ignore any client-specified timestamp." - - -@dataclass(slots=True) -class Timestamping: - """ - Timestamping behavior. - """ - - #: Timestamping mode. - #: - #: If not specified, the default is :attr:`.TimestampingMode.CLIENT_PREFER`. - mode: TimestampingMode | None = None - #: Allow client-specified timestamps to exceed the arrival time. - uncapped: bool | None = None - - -@dataclass(slots=True) -class StreamConfig: - """ - Stream configuration. - """ - - #: Storage class for this stream. - #: - #: If not specified, the default is :attr:`.StorageClass.EXPRESS`. - storage_class: StorageClass | None = None - #: Retention policy for records in this stream. - #: - #: Retention duration in seconds to automatically trim records older than this duration. - #: - #: ``'infinite'`` to retain records indefinitely. - #: (While S2 is in public preview, this is capped at 28 days. Let us know if you'd like the cap removed.) - #: - #: If not specified, the default is to retain records for 7 days. - retention_policy: int | Literal["infinite"] | None = None - #: Timestamping behavior for appends to this stream, which influences how timestamps are handled. - timestamping: Timestamping | None = None - #: Minimum age in seconds before this stream can be automatically deleted if empty. - #: - #: If not specified or set to ``0``, this stream will not be automatically deleted. - delete_on_empty_min_age: int | None = None - - -@dataclass(slots=True) -class BasinConfig: - """ - Basin configuration. - """ - - #: Default configuration for streams in this basin. - default_stream_config: StreamConfig | None = None - #: Create stream on append if it doesn't exist, using the default stream configuration. 
- create_stream_on_append: bool | None = None - - -class ResourceMatchOp(DocEnum): - """ - Resource match operator. - """ - - EXACT = ( - 1, - "Match only the resource with the exact value. Use an empty string to match no resources.", - ) - PREFIX = ( - 2, - "Match all resources that start with the prefix value. Use an empty string to match all resources.", - ) - - -@dataclass(slots=True) -class ResourceMatchRule: - """ - Resource match rule. - """ - - #: Match operator. - match_op: ResourceMatchOp - #: Value to match. - value: str - - -class Permission(DocEnum): - """ - Permission. - """ - - UNSPECIFIED = 0 - READ = 1 - WRITE = 2 - READ_WRITE = 3 - - -@dataclass(slots=True) -class OperationGroupPermissions: - """ - Operation group permissions. - """ - - #: Permission for account operations. - account: Permission = Permission.UNSPECIFIED - #: Permission for basin operations. - basin: Permission = Permission.UNSPECIFIED - #: Permission for stream operations. - stream: Permission = Permission.UNSPECIFIED - - -class Operation(DocEnum): - """ - Operation. - """ - - UNSPECIFIED = 0 - LIST_BASINS = 1 - CREATE_BASIN = 2 - DELETE_BASIN = 3 - RECONFIGURE_BASIN = 4 - GET_BASIN_CONFIG = 5 - ISSUE_ACCESS_TOKEN = 6 - REVOKE_ACCESS_TOKEN = 7 - LIST_ACCESS_TOKENS = 8 - LIST_STREAMS = 9 - CREATE_STREAM = 10 - DELETE_STREAM = 11 - GET_STREAM_CONFIG = 12 - RECONFIGURE_STREAM = 13 - CHECK_TAIL = 14 - APPEND = 15 - READ = 16 - - -@dataclass(slots=True) -class AccessTokenScope: - """ - Access token scope. - """ - - #: Allowed basins. - basins: ResourceMatchRule | None = None - #: Allowed streams. - streams: ResourceMatchRule | None = None - #: Allowed access token IDs. - access_tokens: ResourceMatchRule | None = None - #: Permissions at operation group level. - op_group_perms: OperationGroupPermissions | None = None - #: Allowed operations. - #: - #: Note: - #: A union of allowed operations and groups is used as the effective set of allowed operations. 
- ops: list[Operation] = field(default_factory=list) - - -@dataclass(slots=True) -class AccessTokenInfo: - """ - Access token information. - """ - - #: Access token ID. - id: str - #: Access token scope. - scope: AccessTokenScope - #: Expiration time in seconds since Unix epoch. - expires_at: int | None - #: Whether auto-prefixing is enabled for streams in scope. - auto_prefix_streams: bool - - -class Cloud(DocEnum): - """ - Cloud in which the S2 service runs. - """ - - AWS = 1 - - -class Endpoints: - """ - `S2 endpoints `_. - """ - - __slots__ = ("_account_authority", "_basin_base_authority") - - _account_authority: str - _basin_base_authority: str - - def __init__(self, account_authority: str, basin_base_authority: str): - self._account_authority = account_authority - self._basin_base_authority = basin_base_authority - - @classmethod - @fallible - def for_cloud(cls, cloud: Cloud) -> "Endpoints": - """ - Construct S2 endpoints for the given cloud. - - Args: - cloud: Cloud in which the S2 service runs. 
- """ - return cls( - _account_authority(cloud), - _basin_authority(cloud), - ) - - @classmethod - @fallible - def _from_env(cls) -> "Endpoints": - account_authority = os.getenv("S2_ACCOUNT_ENDPOINT") - basin_authority = os.getenv("S2_BASIN_ENDPOINT") - if ( - account_authority - and basin_authority - and basin_authority.startswith("{basin}.") - ): - basin_base_authority = basin_authority.removeprefix("{basin}.") - return cls(account_authority, basin_base_authority) - raise ValueError("Invalid S2_ACCOUNT_ENDPOINT and/or S2_BASIN_ENDPOINT") - - def _account(self) -> str: - return self._account_authority - - def _basin(self, basin_name: str) -> str: - return f"{basin_name}.{self._basin_base_authority}" - - -def _account_authority(cloud: Cloud) -> str: - match cloud: - case Cloud.AWS: - return "aws.s2.dev" - case _: - raise ValueError(f"Invalid cloud: {cloud}") - - -def _basin_authority(cloud: Cloud) -> str: - match cloud: - case Cloud.AWS: - return "b.aws.s2.dev" - case _: - raise ValueError(f"Invalid cloud: {cloud}") diff --git a/tests/conftest.py b/tests/conftest.py index 97c4c05..7d9940d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,7 @@ import pytest import pytest_asyncio -from streamstore import S2, Basin, Stream +from s2_sdk import S2, S2Basin, S2Stream pytest_plugins = ["pytest_asyncio"] @@ -28,8 +28,8 @@ def basin_prefix() -> str: @pytest_asyncio.fixture(scope="session") async def s2(access_token: str) -> AsyncGenerator[S2, None]: - async with S2(access_token=access_token) as client: - yield client + async with S2(access_token=access_token) as s2: + yield s2 @pytest.fixture @@ -58,10 +58,8 @@ def token_id() -> str: @pytest_asyncio.fixture -async def basin(s2: S2, basin_name: str) -> AsyncGenerator[Basin, None]: - await s2.create_basin( - name=basin_name, - ) +async def basin(s2: S2, basin_name: str) -> AsyncGenerator[S2Basin, None]: + await s2.create_basin(name=basin_name) try: yield s2.basin(basin_name) @@ -70,7 +68,7 @@ async def 
basin(s2: S2, basin_name: str) -> AsyncGenerator[Basin, None]: @pytest_asyncio.fixture(scope="class") -async def shared_basin(s2: S2) -> AsyncGenerator[Basin, None]: +async def shared_basin(s2: S2) -> AsyncGenerator[S2Basin, None]: basin_name = _basin_name() await s2.create_basin(name=basin_name) @@ -81,7 +79,9 @@ async def shared_basin(s2: S2) -> AsyncGenerator[Basin, None]: @pytest_asyncio.fixture -async def stream(shared_basin: Basin, stream_name: str) -> AsyncGenerator[Stream, None]: +async def stream( + shared_basin: S2Basin, stream_name: str +) -> AsyncGenerator[S2Stream, None]: basin = shared_basin await basin.create_stream(name=stream_name) diff --git a/tests/test_account_ops.py b/tests/test_account_ops.py index 230a657..4179ce1 100644 --- a/tests/test_account_ops.py +++ b/tests/test_account_ops.py @@ -1,9 +1,9 @@ -import time +from datetime import datetime, timedelta, timezone import pytest -from streamstore import S2, Basin -from streamstore.schemas import ( +from s2_sdk import S2, S2Basin +from s2_sdk.types import ( AccessTokenScope, BasinConfig, BasinScope, @@ -11,8 +11,7 @@ Operation, OperationGroupPermissions, Permission, - ResourceMatchOp, - ResourceMatchRule, + PrefixMatch, StorageClass, StreamConfig, Timestamping, @@ -52,11 +51,25 @@ async def test_create_basin_with_config(self, s2: S2, basin_name: str): assert basin_info.name == basin_name retrieved_config = await s2.get_basin_config(basin_name) - assert config == retrieved_config + assert retrieved_config.default_stream_config is not None + assert ( + retrieved_config.default_stream_config.storage_class + == StorageClass.STANDARD + ) + assert retrieved_config.default_stream_config.retention_policy == 86400 * 7 + assert ( + retrieved_config.default_stream_config.timestamping.mode + == TimestampingMode.CLIENT_REQUIRE + ) + assert retrieved_config.default_stream_config.timestamping.uncapped is True + assert ( + retrieved_config.default_stream_config.delete_on_empty_min_age == 3600 + ) + assert 
retrieved_config.create_stream_on_append is True finally: await s2.delete_basin(basin_name) - async def test_reconfigure_basin(self, s2: S2, basin: Basin): + async def test_reconfigure_basin(self, s2: S2, basin: S2Basin): config = BasinConfig( default_stream_config=StreamConfig( storage_class=StorageClass.STANDARD, @@ -67,30 +80,19 @@ async def test_reconfigure_basin(self, s2: S2, basin: Basin): updated_config = await s2.reconfigure_basin(basin.name, config) - assert config.default_stream_config is not None + assert updated_config.default_stream_config is not None assert ( - updated_config.default_stream_config.storage_class - == config.default_stream_config.storage_class + updated_config.default_stream_config.storage_class == StorageClass.STANDARD ) - assert ( - updated_config.default_stream_config.retention_policy - == config.default_stream_config.retention_policy - ) - assert updated_config.create_stream_on_append == config.create_stream_on_append - - assert ( - updated_config.default_stream_config.timestamping.mode - == TimestampingMode.UNSPECIFIED - ) - - assert updated_config.default_stream_config.delete_on_empty_min_age == 0 + assert updated_config.default_stream_config.retention_policy == 3600 + assert updated_config.create_stream_on_append is True async def test_list_basins(self, s2: S2, basin_names: list[str]): basin_infos = [] try: for basin_name in basin_names: - stream_info = await s2.create_basin(name=basin_name) - basin_infos.append(stream_info) + basin_info = await s2.create_basin(name=basin_name) + basin_infos.append(basin_info) page = await s2.list_basins() @@ -105,8 +107,8 @@ async def test_list_basins_with_limit(self, s2: S2, basin_names: list[str]): basin_infos = [] try: for basin_name in basin_names: - stream_info = await s2.create_basin(name=basin_name) - basin_infos.append(stream_info) + basin_info = await s2.create_basin(name=basin_name) + basin_infos.append(basin_info) page = await s2.list_basins(limit=1) @@ -120,13 +122,13 @@ async def 
test_list_basins_with_prefix(self, s2: S2, basin_name: str): await s2.create_basin(name=basin_name) try: - prefix = basin_name[:5] + prefix = basin_name[:12] page = await s2.list_basins(prefix=prefix) - basin_names = [b.name for b in page.items] - assert basin_name in basin_names + names = [b.name for b in page.items] + assert basin_name in names - for name in basin_names: + for name in names: assert name.startswith(prefix) finally: @@ -134,11 +136,9 @@ async def test_list_basins_with_prefix(self, s2: S2, basin_name: str): async def test_issue_access_token(self, s2: S2, token_id: str, basin_prefix: str): scope = AccessTokenScope( - basins=ResourceMatchRule( - match_op=ResourceMatchOp.PREFIX, value=basin_prefix - ), - streams=ResourceMatchRule(match_op=ResourceMatchOp.PREFIX, value=""), - op_group_perms=OperationGroupPermissions( + basins=PrefixMatch(basin_prefix), + streams=PrefixMatch(""), + op_groups=OperationGroupPermissions( basin=Permission.READ, stream=Permission.READ, ), @@ -150,14 +150,13 @@ async def test_issue_access_token(self, s2: S2, token_id: str, basin_prefix: str assert isinstance(token, str) assert len(token) > 0 finally: - token_info = await s2.revoke_access_token(token_id) - assert token_info.scope == scope + await s2.revoke_access_token(token_id) async def test_issue_access_token_with_expiry(self, s2: S2, token_id: str): - expires_at = int(time.time()) + 3600 + expires_at = (datetime.now(timezone.utc) + timedelta(hours=1)).isoformat() scope = AccessTokenScope( - streams=ResourceMatchRule(match_op=ResourceMatchOp.PREFIX, value=""), + streams=PrefixMatch(""), ops=[Operation.READ, Operation.CHECK_TAIL], ) @@ -175,17 +174,16 @@ async def test_issue_access_token_with_expiry(self, s2: S2, token_id: str): token_info = next((t for t in page.items if t.id == token_id), None) assert token_info is not None - assert token_info.expires_at == expires_at - assert token_info.scope.streams == scope.streams - assert set(token_info.scope.ops) == set(scope.ops) + 
assert token_info.expires_at is not None + assert token_info.scope.streams is not None finally: await s2.revoke_access_token(token_id) async def test_issue_access_token_with_auto_prefix(self, s2: S2, token_id: str): scope = AccessTokenScope( - streams=ResourceMatchRule(match_op=ResourceMatchOp.PREFIX, value="prefix/"), - op_group_perms=OperationGroupPermissions(stream=Permission.READ_WRITE), + streams=PrefixMatch("prefix/"), + op_groups=OperationGroupPermissions(stream=Permission.READ_WRITE), ) token = await s2.issue_access_token( @@ -204,7 +202,6 @@ async def test_issue_access_token_with_auto_prefix(self, s2: S2, token_id: str): token_info = page.items[0] assert token_info is not None - assert token_info.scope == scope assert token_info.auto_prefix_streams is True finally: diff --git a/tests/test_basin_ops.py b/tests/test_basin_ops.py index aabb6ee..8076815 100644 --- a/tests/test_basin_ops.py +++ b/tests/test_basin_ops.py @@ -1,7 +1,7 @@ import pytest -from streamstore import Basin, Stream -from streamstore.schemas import ( +from s2_sdk import S2Basin, S2Stream +from s2_sdk.types import ( StorageClass, StreamConfig, Timestamping, @@ -11,7 +11,7 @@ @pytest.mark.basin class TestBasinOperations: - async def test_create_stream(self, shared_basin: Basin, stream_name: str): + async def test_create_stream(self, shared_basin: S2Basin, stream_name: str): basin = shared_basin stream_info = await basin.create_stream(name=stream_name) @@ -24,7 +24,7 @@ async def test_create_stream(self, shared_basin: Basin, stream_name: str): await basin.delete_stream(stream_name) async def test_create_stream_with_config( - self, shared_basin: Basin, stream_name: str + self, shared_basin: S2Basin, stream_name: str ): basin = shared_basin @@ -43,19 +43,24 @@ async def test_create_stream_with_config( assert stream_info.name == stream_name retrieved_config = await basin.get_stream_config(stream_name) - assert retrieved_config == config + assert retrieved_config.storage_class == 
StorageClass.STANDARD + assert retrieved_config.retention_policy == 86400 * 3 + assert retrieved_config.timestamping is not None + assert retrieved_config.timestamping.mode == TimestampingMode.ARRIVAL + assert retrieved_config.timestamping.uncapped is False + assert retrieved_config.delete_on_empty_min_age == 7200 finally: await basin.delete_stream(stream_name) - async def test_default_stream_config(self, shared_basin: Basin, stream: Stream): + async def test_default_stream_config(self, shared_basin: S2Basin, stream: S2Stream): basin = shared_basin config = await basin.get_stream_config(stream.name) assert config.storage_class == StorageClass.EXPRESS assert config.retention_policy == 86400 * 7 - async def test_reconfigure_stream(self, shared_basin: Basin, stream: Stream): + async def test_reconfigure_stream(self, shared_basin: S2Basin, stream: S2Stream): basin = shared_basin config = StreamConfig( storage_class=StorageClass.STANDARD, @@ -67,7 +72,12 @@ async def test_reconfigure_stream(self, shared_basin: Basin, stream: Stream): ) updated_config = await basin.reconfigure_stream(stream.name, config) - assert updated_config == config + assert updated_config.storage_class == StorageClass.STANDARD + assert updated_config.retention_policy == "infinite" + assert updated_config.timestamping is not None + assert updated_config.timestamping.mode == TimestampingMode.CLIENT_REQUIRE + assert updated_config.timestamping.uncapped is True + assert updated_config.delete_on_empty_min_age == 1800 config = StreamConfig( storage_class=StorageClass.EXPRESS, @@ -78,9 +88,13 @@ async def test_reconfigure_stream(self, shared_basin: Basin, stream: Stream): delete_on_empty_min_age=3600, ) updated_config = await basin.reconfigure_stream(stream.name, config) - assert updated_config == config + assert updated_config.storage_class == StorageClass.EXPRESS + assert updated_config.retention_policy == 86400 * 90 + assert updated_config.timestamping.mode == TimestampingMode.CLIENT_PREFER + assert 
# --- tests/test_compression.py ----------------------------------------------

from s2_sdk._s2s._compression import (
    COMPRESSION_GZIP,
    COMPRESSION_NONE,
    COMPRESSION_ZSTD,
    compress,
    decompress,
)


class TestCompression:
    """Round-trip and efficacy checks for the s2s compression codecs."""

    def test_zstd_roundtrip(self):
        data = b"hello world" * 100
        compressed = compress(data, COMPRESSION_ZSTD)
        assert compressed != data
        decompressed = decompress(compressed, COMPRESSION_ZSTD)
        assert decompressed == data

    def test_gzip_roundtrip(self):
        data = b"hello world" * 100
        compressed = compress(data, COMPRESSION_GZIP)
        assert compressed != data
        decompressed = decompress(compressed, COMPRESSION_GZIP)
        assert decompressed == data

    def test_none_passthrough(self):
        # COMPRESSION_NONE must be a byte-for-byte identity in both directions.
        data = b"hello world"
        assert compress(data, COMPRESSION_NONE) == data
        assert decompress(data, COMPRESSION_NONE) == data

    def test_zstd_compresses(self):
        # Highly repetitive input must shrink under zstd.
        data = b"aaaa" * 1000
        compressed = compress(data, COMPRESSION_ZSTD)
        assert len(compressed) < len(data)

    def test_gzip_compresses(self):
        # Highly repetitive input must shrink under gzip.
        data = b"aaaa" * 1000
        compressed = compress(data, COMPRESSION_GZIP)
        assert len(compressed) < len(data)


# --- tests/test_mappers.py --------------------------------------------------

import s2_sdk._generated.s2.v1.s2_pb2 as pb
from s2_sdk._mappers import (
    access_token_info_from_wire,
    access_token_info_to_wire,
    append_ack_from_json,
    append_ack_from_proto,
    append_input_to_json,
    append_input_to_proto,
    basin_config_from_wire,
    basin_config_to_wire,
    basin_info_from_wire,
    metric_set_from_wire,
    read_batch_from_proto,
    read_limit_params,
    read_start_params,
    stream_config_from_wire,
    stream_config_to_wire,
    stream_info_from_wire,
    tail_from_wire,
)
from s2_sdk.types import (
    AccessTokenScope,
    Accumulation,
    AppendInput,
    BasinConfig,
    BasinScope,
    BasinState,
    ExactMatch,
    Gauge,
    Label,
    MetricUnit,
    OperationGroupPermissions,
    Permission,
    PrefixMatch,
    ReadLimit,
    Record,
    Scalar,
    SeqNum,
    StorageClass,
    StreamConfig,
    TailOffset,
    TimeseriesInterval,
    Timestamp,
    Timestamping,
    TimestampingMode,
)


class TestBasinInfoMapper:
    """Wire (JSON-ish dict) -> BasinInfo mapping."""

    def test_from_wire(self):
        data = {"name": "my-basin-name", "scope": "aws:us-east-1", "state": "active"}
        info = basin_info_from_wire(data)
        assert info.name == "my-basin-name"
        assert info.scope == BasinScope.AWS_US_EAST_1
        assert info.state == BasinState.ACTIVE


class TestStreamInfoMapper:
    """Wire dict -> StreamInfo mapping, including nullable deleted_at."""

    def test_from_wire(self):
        data = {
            "name": "my-stream",
            "created_at": "2024-01-01T00:00:00Z",
            "deleted_at": None,
        }
        info = stream_info_from_wire(data)
        assert info.name == "my-stream"
        assert info.created_at is not None
        assert info.deleted_at is None


class TestStreamConfigMapper:
    """StreamConfig <-> wire round-trips, including the 'infinite' sentinel."""

    def test_roundtrip(self):
        config = StreamConfig(
            storage_class=StorageClass.EXPRESS,
            retention_policy=86400,
            timestamping=Timestamping(
                mode=TimestampingMode.CLIENT_PREFER, uncapped=False
            ),
            delete_on_empty_min_age=3600,
        )
        wire = stream_config_to_wire(config)
        assert wire is not None
        result = stream_config_from_wire(wire)
        assert result.storage_class == config.storage_class
        assert result.retention_policy == config.retention_policy
        assert result.timestamping.mode == config.timestamping.mode
        assert result.timestamping.uncapped == config.timestamping.uncapped
        assert result.delete_on_empty_min_age == config.delete_on_empty_min_age

    def test_infinite_retention(self):
        # "infinite" maps to an empty-object variant on the wire.
        config = StreamConfig(retention_policy="infinite")
        wire = stream_config_to_wire(config)
        assert wire is not None
        assert wire["retention_policy"] == {"infinite": {}}
        result = stream_config_from_wire(wire)
        assert result.retention_policy == "infinite"

    def test_none_config(self):
        # None config must pass through as None (no wire payload).
        assert stream_config_to_wire(None) is None


class TestBasinConfigMapper:
    """BasinConfig <-> wire round-trip, including nested stream defaults."""

    def test_roundtrip(self):
        config = BasinConfig(
            default_stream_config=StreamConfig(storage_class=StorageClass.STANDARD),
            create_stream_on_append=True,
            create_stream_on_read=False,
        )
        wire = basin_config_to_wire(config)
        assert wire is not None
        result = basin_config_from_wire(wire)
        assert result.default_stream_config is not None
        assert result.default_stream_config.storage_class == StorageClass.STANDARD
        assert result.create_stream_on_append is True
        assert result.create_stream_on_read is False


class TestAccessTokenMapper:
    """Access-token scope mapping for both prefix and exact match rules."""

    def test_roundtrip_with_prefix_match(self):
        scope = AccessTokenScope(
            basins=PrefixMatch("test-"),
            streams=PrefixMatch(""),
            op_groups=OperationGroupPermissions(
                basin=Permission.READ,
                stream=Permission.READ_WRITE,
            ),
        )
        wire = access_token_info_to_wire("my-token", scope, False, None)
        assert wire["id"] == "my-token"
        assert wire["scope"]["basins"] == {"prefix": "test-"}
        assert wire["scope"]["streams"] == {"prefix": ""}

        result = access_token_info_from_wire(wire)
        assert result.id == "my-token"
        assert isinstance(result.scope.basins, PrefixMatch)
        assert result.scope.basins.value == "test-"
        assert isinstance(result.scope.streams, PrefixMatch)

    def test_exact_match(self):
        scope = AccessTokenScope(basins=ExactMatch("specific-basin"))
        wire = access_token_info_to_wire("t", scope, False, None)
        assert wire["scope"]["basins"] == {"exact": "specific-basin"}
        result = access_token_info_from_wire(wire)
        assert isinstance(result.scope.basins, ExactMatch)
        assert result.scope.basins.value == "specific-basin"


class TestTailMapper:
    """Wire tail position -> Tail mapping."""

    def test_from_wire(self):
        data = {"tail": {"seq_num": 42, "timestamp": 1000}}
        tail = tail_from_wire(data)
        assert tail.next_seq_num == 42
        assert tail.last_timestamp == 1000


class TestAppendProtobuf:
    """AppendInput/AppendAck <-> protobuf mapping (data-plane path)."""

    def test_input_to_proto(self):
        # Named append_input (not `input`) to avoid shadowing the builtin.
        append_input = AppendInput(
            records=[Record(body=b"hello", headers=[(b"k", b"v")], timestamp=100)],
            match_seq_num=5,
            fencing_token="my-fence",
        )
        proto = append_input_to_proto(append_input)
        assert len(proto.records) == 1
        assert proto.records[0].body == b"hello"
        assert proto.records[0].timestamp == 100
        assert proto.records[0].headers[0].name == b"k"
        assert proto.records[0].headers[0].value == b"v"
        assert proto.match_seq_num == 5
        assert proto.fencing_token == "my-fence"

    def test_ack_from_proto(self):
        ack = pb.AppendAck(
            start=pb.StreamPosition(seq_num=0, timestamp=100),
            end=pb.StreamPosition(seq_num=3, timestamp=200),
            tail=pb.StreamPosition(seq_num=5, timestamp=300),
        )
        output = append_ack_from_proto(ack)
        assert output.start.seq_num == 0
        assert output.start.timestamp == 100
        assert output.end.seq_num == 3
        assert output.end.timestamp == 200
        assert output.tail.seq_num == 5
        assert output.tail.timestamp == 300


class TestAppendJson:
    """AppendInput/AppendAck <-> JSON mapping (control-plane path)."""

    def test_input_to_json(self):
        # Named append_input (not `input`) to avoid shadowing the builtin.
        append_input = AppendInput(
            records=[Record(body=b"hello", headers=[(b"k", b"v")])],
            match_seq_num=5,
        )
        json_data = append_input_to_json(append_input)
        assert len(json_data["records"]) == 1
        assert json_data["match_seq_num"] == 5

    def test_ack_from_json(self):
        data = {
            "start": {"seq_num": 0, "timestamp": 100},
            "end": {"seq_num": 3, "timestamp": 200},
            "tail": {"seq_num": 5, "timestamp": 300},
        }
        output = append_ack_from_json(data)
        assert output.start.seq_num == 0
        assert output.end.seq_num == 3
        assert output.tail.seq_num == 5


class TestReadBatchProtobuf:
    """ReadBatch protobuf -> (records, tail) mapping and command filtering."""

    def test_from_proto(self):
        batch = pb.ReadBatch(
            records=[
                pb.SequencedRecord(
                    seq_num=0,
                    timestamp=100,
                    headers=[pb.Header(name=b"k", value=b"v")],
                    body=b"hello",
                ),
            ],
        )
        records, tail = read_batch_from_proto(batch)
        assert len(records) == 1
        assert records[0].seq_num == 0
        assert records[0].body == b"hello"
        assert records[0].headers == [(b"k", b"v")]
        assert tail is None

    def test_with_tail(self):
        batch = pb.ReadBatch(
            records=[],
            tail=pb.StreamPosition(seq_num=10, timestamp=500),
        )
        records, tail = read_batch_from_proto(batch)
        assert len(records) == 0
        assert tail is not None
        assert tail.next_seq_num == 10
        assert tail.last_timestamp == 500

    def test_filter_command_records(self):
        # Command records carry an empty header name; they must be dropped
        # when ignore_command_records=True.
        batch = pb.ReadBatch(
            records=[
                pb.SequencedRecord(
                    seq_num=0,
                    timestamp=100,
                    headers=[pb.Header(name=b"", value=b"fence")],
                    body=b"token",
                ),
                pb.SequencedRecord(
                    seq_num=1,
                    timestamp=200,
                    headers=[],
                    body=b"data",
                ),
            ],
        )
        records, _ = read_batch_from_proto(batch, ignore_command_records=True)
        assert len(records) == 1
        assert records[0].seq_num == 1


class TestReadStartParams:
    """Start-position variants -> query-parameter dicts."""

    def test_seq_num(self):
        params = read_start_params(SeqNum(42))
        assert params == {"seq_num": 42}

    def test_timestamp(self):
        params = read_start_params(Timestamp(1000))
        assert params == {"timestamp": 1000}

    def test_tail_offset(self):
        params = read_start_params(TailOffset(5))
        assert params == {"tail_offset": 5}


class TestReadLimitParams:
    """ReadLimit -> query-parameter dict, including the None/empty cases."""

    def test_none(self):
        params = read_limit_params(None)
        assert params == {}

    def test_count_only(self):
        params = read_limit_params(ReadLimit(count=10))
        assert params == {"count": 10}

    def test_bytes_only(self):
        params = read_limit_params(ReadLimit(bytes=1024))
        assert params == {"bytes": 1024}

    def test_both(self):
        params = read_limit_params(ReadLimit(count=10, bytes=1024))
        assert params == {"count": 10, "bytes": 1024}


class TestMetricSetMapper:
    """Wire metric payloads -> typed metric variants (Scalar/Accumulation/...)."""

    def test_scalar(self):
        data = {
            "values": [
                {"scalar": {"name": "count", "unit": "operations", "value": 42.0}}
            ]
        }
        result = metric_set_from_wire(data)
        assert len(result) == 1
        assert isinstance(result[0], Scalar)
        assert result[0].name == "count"
        assert result[0].unit == MetricUnit.OPERATIONS
        assert result[0].value == 42.0

    def test_accumulation(self):
        data = {
            "values": [
                {
                    "accumulation": {
                        "name": "bytes_in",
                        "unit": "bytes",
                        "bucket_length": "hour",
                        "values": [[1700000000, 1024.0], [1700003600, 2048.0]],
                    }
                }
            ]
        }
        result = metric_set_from_wire(data)
        assert len(result) == 1
        assert isinstance(result[0], Accumulation)
        assert result[0].bucket_length == TimeseriesInterval.HOUR
        assert len(result[0].values) == 2

    def test_gauge(self):
        data = {
            "values": [
                {
                    "gauge": {
                        "name": "storage",
                        "unit": "bytes",
                        "values": [[1700000000, 5000.0]],
                    }
                }
            ]
        }
        result = metric_set_from_wire(data)
        assert len(result) == 1
        assert isinstance(result[0], Gauge)

    def test_label(self):
        data = {
            "values": [
                {"label": {"name": "region", "values": ["us-east-1", "us-west-2"]}}
            ]
        }
        result = metric_set_from_wire(data)
        assert len(result) == 1
        assert isinstance(result[0], Label)
        assert result[0].values == ["us-east-1", "us-west-2"]

    def test_mixed(self):
        # Heterogeneous metric kinds in one payload keep their wire order.
        data = {
            "values": [
                {"scalar": {"name": "a", "unit": "bytes", "value": 1.0}},
                {"label": {"name": "b", "values": ["x"]}},
            ]
        }
        result = metric_set_from_wire(data)
        assert len(result) == 2
        assert isinstance(result[0], Scalar)
        assert isinstance(result[1], Label)


# --- tests/test_protocol.py -------------------------------------------------

import pytest

from s2_sdk._s2s._protocol import (
    COMPRESSION_NONE,
    COMPRESSION_ZSTD,
    decode_frame,
    encode_frame,
    maybe_compress,
)


class TestFrameEncoding:
    """encode_frame/decode_frame framing: length prefix, terminal flag, codec."""

    def test_encode_decode_no_compression(self):
        body = b"hello world"
        frame = encode_frame(body)
        decoded_body, terminal, compression = decode_frame(frame)

        assert decoded_body == body
        assert terminal is False
        assert compression == COMPRESSION_NONE

    def test_encode_decode_with_zstd(self):
        body = b"hello world" * 100
        frame = encode_frame(body, compression=COMPRESSION_ZSTD)
        decoded_body, terminal, compression = decode_frame(frame)

        assert decoded_body == body
        assert terminal is False

    def test_terminal_frame(self):
        body = b"\x00\x00some error"
        frame = encode_frame(body, terminal=True)
        decoded_body, terminal, compression = decode_frame(frame)

        assert terminal is True
        # Terminal frames are not decompressed
        assert decoded_body == body

    def test_frame_length_encoding(self):
        # Verify 3-byte length prefix
        body = b"x" * 256
        frame = encode_frame(body)

        # First 3 bytes are length (big-endian)
        length = int.from_bytes(frame[0:3], "big")
        assert length == 256

    def test_empty_body(self):
        frame = encode_frame(b"")
        decoded_body, terminal, compression = decode_frame(frame)

        assert decoded_body == b""
        assert terminal is False

    def test_frame_too_short(self):
        with pytest.raises(ValueError, match="Frame too short"):
            decode_frame(b"\x00\x00")


class TestMaybeCompress:
    """maybe_compress: only compress above the size threshold, honor opt-out."""

    def test_below_threshold(self):
        body = b"small"
        compressed, comp_code = maybe_compress(body)
        assert compressed == body
        assert comp_code == COMPRESSION_NONE

    def test_above_threshold(self):
        body = b"x" * 2048
        compressed, comp_code = maybe_compress(body)
        assert comp_code == COMPRESSION_ZSTD
        assert len(compressed) < len(body)

    def test_no_compression_requested(self):
        body = b"x" * 2048
        compressed, comp_code = maybe_compress(body, compression=COMPRESSION_NONE)
        assert compressed == body
        assert comp_code == COMPRESSION_NONE
Stream): ) output = await stream.append(input) - assert output.start_seq_num == 0 - assert output.end_seq_num == 3 - assert output.next_seq_num == 3 + assert output.start.seq_num == 0 + assert output.end.seq_num == 3 + assert output.tail.seq_num == 3 - async def test_append_with_match_seq_num(self, stream: Stream): + async def test_append_with_match_seq_num(self, stream: S2Stream): input_0 = AppendInput(records=[Record(body=b"record-0")]) output_0 = await stream.append(input_0) input_1 = AppendInput( - records=[Record(body=b"record-1")], match_seq_num=output_0.next_seq_num + records=[Record(body=b"record-1")], + match_seq_num=output_0.tail.seq_num, ) output_1 = await stream.append(input_1) - assert output_1.start_seq_num == 1 - assert output_1.end_seq_num == 2 - assert output_1.next_seq_num == 2 + assert output_1.start.seq_num == 1 + assert output_1.end.seq_num == 2 + assert output_1.tail.seq_num == 2 - async def test_append_with_timestamp(self, stream: Stream): - timestamp_0 = int(time.time()) + async def test_append_with_timestamp(self, stream: S2Stream): + timestamp_0 = int(time.time() * 1000) await asyncio.sleep(0.1) - timestamp_1 = int(time.time()) + timestamp_1 = int(time.time() * 1000) input = AppendInput( records=[ @@ -75,14 +76,14 @@ async def test_append_with_timestamp(self, stream: Stream): ) output = await stream.append(input) - assert output.start_seq_num == 0 - assert output.start_timestamp == timestamp_0 - assert output.end_seq_num == 2 - assert output.end_timestamp == timestamp_1 - assert output.next_seq_num == 2 - assert output.last_timestamp == timestamp_1 + assert output.start.seq_num == 0 + assert output.start.timestamp == timestamp_0 + assert output.end.seq_num == 2 + assert output.end.timestamp == timestamp_1 + assert output.tail.seq_num == 2 + assert output.tail.timestamp == timestamp_1 - async def test_read_from_seq_num_zero(self, stream: Stream): + async def test_read_from_seq_num_zero(self, stream: S2Stream): await stream.append( 
AppendInput(records=[Record(body=f"record-{i}".encode()) for i in range(3)]) ) @@ -96,7 +97,7 @@ async def test_read_from_seq_num_zero(self, stream: Stream): assert record.seq_num == i assert record.body == f"record-{i}".encode() - async def test_read_with_limit(self, stream: Stream): + async def test_read_with_limit(self, stream: S2Stream): await stream.append( AppendInput(records=[Record(body=f"record-{i}".encode()) for i in range(5)]) ) @@ -112,15 +113,15 @@ async def test_read_with_limit(self, stream: Stream): total_bytes = sum(metered_bytes([r]) for r in records) assert total_bytes <= 20 - async def test_read_from_timestamp(self, stream: Stream): + async def test_read_from_timestamp(self, stream: S2Stream): output = await stream.append(AppendInput(records=[Record(body=b"record-0")])) - records = await stream.read(start=Timestamp(output.start_timestamp)) + records = await stream.read(start=Timestamp(output.start.timestamp)) assert isinstance(records, list) assert len(records) == 1 - async def test_read_from_tail_offset(self, stream: Stream): + async def test_read_from_tail_offset(self, stream: S2Stream): await stream.append( AppendInput(records=[Record(body=f"record-{i}".encode()) for i in range(5)]) ) @@ -132,7 +133,7 @@ async def test_read_from_tail_offset(self, stream: Stream): assert records[0].body == b"record-3" assert records[1].body == b"record-4" - async def test_read_until_timestamp(self, stream: Stream): + async def test_read_until_timestamp(self, stream: S2Stream): timestamp_0 = int(time.time() * 1000) await asyncio.sleep(0.2) timestamp_1 = int(time.time() * 1000) @@ -155,7 +156,7 @@ async def test_read_until_timestamp(self, stream: Stream): assert records[0].timestamp == timestamp_0 assert records[1].timestamp == timestamp_1 - async def test_read_beyond_tail(self, stream: Stream): + async def test_read_beyond_tail(self, stream: S2Stream): await stream.append( AppendInput(records=[Record(body=f"record-{i}".encode()) for i in range(5)]) ) @@ -165,7 
# --- tests/test_validators.py ------------------------------------------------

import pytest

from s2_sdk._validators import validate_append_input, validate_basin
from s2_sdk.types import AppendInput, Record


class TestValidateBasin:
    """Basin-name validation: accepted shapes and each rejection reason."""

    def test_valid_basin_names(self):
        # Each of these must pass without raising.
        for name in ("test-basin-name", "abcdefgh", "a1b2c3d4", "a" * 48):
            validate_basin(name)

    def test_too_short(self):
        with pytest.raises(ValueError, match="Invalid basin name"):
            validate_basin("short")

    def test_too_long(self):
        with pytest.raises(ValueError, match="Invalid basin name"):
            validate_basin("a" * 49)

    def test_uppercase(self):
        with pytest.raises(ValueError, match="Invalid basin name"):
            validate_basin("UPPERCASE")

    def test_starts_with_hyphen(self):
        with pytest.raises(ValueError, match="Invalid basin name"):
            validate_basin("-start-hyp")

    def test_ends_with_hyphen(self):
        with pytest.raises(ValueError, match="Invalid basin name"):
            validate_basin("end-hypen-")

    def test_invalid_characters(self):
        with pytest.raises(ValueError, match="Invalid basin name"):
            validate_basin("invalid_name")


class TestValidateAppendInput:
    """Append-input validation: record count bounds and metered-size cap."""

    def test_valid_input(self):
        single = AppendInput(records=[Record(body=b"hello")])
        validate_append_input(single)

    def test_empty_records(self):
        with pytest.raises(ValueError, match="Invalid append input"):
            validate_append_input(AppendInput(records=[]))

    def test_too_many_records(self):
        over_limit = [Record(body=b"x") for _ in range(1001)]
        with pytest.raises(ValueError, match="Invalid append input"):
            validate_append_input(AppendInput(records=over_limit))

    def test_max_records_ok(self):
        at_limit = [Record(body=b"x") for _ in range(1000)]
        validate_append_input(AppendInput(records=at_limit))

    def test_exceeds_1mib(self):
        # A single record with a body > 1 MiB
        oversized = [Record(body=b"x" * (1024 * 1024 + 1))]
        with pytest.raises(ValueError, match="Invalid append input"):
            validate_append_input(AppendInput(records=oversized))
#!/bin/bash
# Regenerate the SDK's generated sources from s2-specs:
#   1. OpenAPI spec  -> pydantic models (control plane)
#   2. Protobuf spec -> protobuf messages (data plane)
#   3. Run the project checkers
set -euo pipefail

# Step 1: OpenAPI → pydantic models (control plane)
uv run datamodel-codegen \
  --input s2-specs/s2/v1/openapi.json \
  --input-file-type openapi \
  --output src/s2_sdk/_generated/_models.py \
  --output-model-type pydantic_v2.BaseModel \
  --target-python-version 3.11 \
  --use-standard-collections \
  --use-union-operator

# Step 2: Proto → protobuf messages (data plane)
cd src/s2_sdk/_generated
uv run python -m grpc_tools.protoc \
  -I../../../s2-specs \
  --python_out=. \
  --pyi_out=. \
  ../../../s2-specs/s2/v1/s2.proto

# Fix import paths — workaround for protoc emitting absolute imports
# (https://github.com/protocolbuffers/protobuf/issues/7061).
# `sed -i ''` is BSD/macOS-only and errors on GNU sed, so use a backup
# suffix (portable across both) and delete the backups afterwards.
# -print0 / -0 keeps the pipeline safe for any filename.
find . -name '*.py' -print0 | xargs -0 sed -i.bak \
  's/from s2\.\(v[0-9][a-z0-9]*\) import s2_pb2/from s2_sdk._generated.s2.\1 import s2_pb2/'
find . -name '*.bak' -delete
cd ../../..

# Step 3: Run checks
uv run poe checker
"https://files.pythonhosted.org/packages/38/61/0b9ae6399dd4a58d8c1b1dc5a27d6f2808023d0b5dd3104bb99f45a33ff6/argcomplete-3.6.3.tar.gz", hash = "sha256:62e8ed4fd6a45864acc8235409461b72c9a28ee785a2011cc5eb78318786c89c", size = 73754, upload-time = "2025-10-20T03:33:34.741Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/f5/9373290775639cb67a2fce7f629a1c240dce9f12fe927bc32b2736e16dfc/argcomplete-3.6.3-py3-none-any.whl", hash = "sha256:f5007b3a600ccac5d25bbce33089211dfd49eab4a7718da3f10e3082525a92ce", size = 43846, upload-time = "2025-10-20T03:33:33.021Z" }, +] + [[package]] name = "autodocsumm" version = "0.2.14" @@ -90,6 +109,43 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925, upload-time = "2024-01-17T16:53:12.779Z" }, ] +[[package]] +name = "black" +version = "26.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "pytokens" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/c5/61175d618685d42b005847464b8fb4743a67b1b8fdb75e50e5a96c31a27a/black-26.3.1.tar.gz", hash = "sha256:2c50f5063a9641c7eed7795014ba37b0f5fa227f3d408b968936e24bc0566b07", size = 666155, upload-time = "2026-03-12T03:36:03.593Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/57/5f11c92861f9c92eb9dddf515530bc2d06db843e44bdcf1c83c1427824bc/black-26.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:28ef38aee69e4b12fda8dba75e21f9b4f979b490c8ac0baa7cb505369ac9e1ff", size = 1851987, upload-time = "2026-03-12T03:40:06.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/aa/340a1463660bf6831f9e39646bf774086dbd8ca7fc3cded9d59bbdf4ad0a/black-26.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bf162ed91a26f1adba8efda0b573bc6924ec1408a52cc6f82cb73ec2b142c", size = 1689499, upload-time = "2026-03-12T03:40:07.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/01/b726c93d717d72733da031d2de10b92c9fa4c8d0c67e8a8a372076579279/black-26.3.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:474c27574d6d7037c1bc875a81d9be0a9a4f9ee95e62800dab3cfaadbf75acd5", size = 1754369, upload-time = "2026-03-12T03:40:09.279Z" }, + { url = "https://files.pythonhosted.org/packages/e3/09/61e91881ca291f150cfc9eb7ba19473c2e59df28859a11a88248b5cbbc4d/black-26.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e9d0d86df21f2e1677cc4bd090cd0e446278bcbbe49bf3659c308c3e402843e", size = 1413613, upload-time = "2026-03-12T03:40:10.943Z" }, + { url = "https://files.pythonhosted.org/packages/16/73/544f23891b22e7efe4d8f812371ab85b57f6a01b2fc45e3ba2e52ba985b8/black-26.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:9a5e9f45e5d5e1c5b5c29b3bd4265dcc90e8b92cf4534520896ed77f791f4da5", size = 1219719, upload-time = "2026-03-12T03:40:12.597Z" }, + { url = "https://files.pythonhosted.org/packages/dc/f8/da5eae4fc75e78e6dceb60624e1b9662ab00d6b452996046dfa9b8a6025b/black-26.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e6f89631eb88a7302d416594a32faeee9fb8fb848290da9d0a5f2903519fc1", size = 1895920, upload-time = "2026-03-12T03:40:13.921Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9f/04e6f26534da2e1629b2b48255c264cabf5eedc5141d04516d9d68a24111/black-26.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41cd2012d35b47d589cb8a16faf8a32ef7a336f56356babd9fcf70939ad1897f", size = 1718499, upload-time = "2026-03-12T03:40:15.239Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/91/a5935b2a63e31b331060c4a9fdb5a6c725840858c599032a6f3aac94055f/black-26.3.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f76ff19ec5297dd8e66eb64deda23631e642c9393ab592826fd4bdc97a4bce7", size = 1794994, upload-time = "2026-03-12T03:40:17.124Z" }, + { url = "https://files.pythonhosted.org/packages/e7/0a/86e462cdd311a3c2a8ece708d22aba17d0b2a0d5348ca34b40cdcbea512e/black-26.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ddb113db38838eb9f043623ba274cfaf7d51d5b0c22ecb30afe58b1bb8322983", size = 1420867, upload-time = "2026-03-12T03:40:18.83Z" }, + { url = "https://files.pythonhosted.org/packages/5b/e5/22515a19cb7eaee3440325a6b0d95d2c0e88dd180cb011b12ae488e031d1/black-26.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:dfdd51fc3e64ea4f35873d1b3fb25326773d55d2329ff8449139ebaad7357efb", size = 1230124, upload-time = "2026-03-12T03:40:20.425Z" }, + { url = "https://files.pythonhosted.org/packages/f5/77/5728052a3c0450c53d9bb3945c4c46b91baa62b2cafab6801411b6271e45/black-26.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:855822d90f884905362f602880ed8b5df1b7e3ee7d0db2502d4388a954cc8c54", size = 1895034, upload-time = "2026-03-12T03:40:21.813Z" }, + { url = "https://files.pythonhosted.org/packages/52/73/7cae55fdfdfbe9d19e9a8d25d145018965fe2079fa908101c3733b0c55a0/black-26.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8a33d657f3276328ce00e4d37fe70361e1ec7614da5d7b6e78de5426cb56332f", size = 1718503, upload-time = "2026-03-12T03:40:23.666Z" }, + { url = "https://files.pythonhosted.org/packages/e1/87/af89ad449e8254fdbc74654e6467e3c9381b61472cc532ee350d28cfdafb/black-26.3.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1cd08e99d2f9317292a311dfe578fd2a24b15dbce97792f9c4d752275c1fa56", size = 1793557, upload-time = "2026-03-12T03:40:25.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/10/d6c06a791d8124b843bf325ab4ac7d2f5b98731dff84d6064eafd687ded1/black-26.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:c7e72339f841b5a237ff14f7d3880ddd0fc7f98a1199e8c4327f9a4f478c1839", size = 1422766, upload-time = "2026-03-12T03:40:27.14Z" }, + { url = "https://files.pythonhosted.org/packages/59/4f/40a582c015f2d841ac24fed6390bd68f0fc896069ff3a886317959c9daf8/black-26.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:afc622538b430aa4c8c853f7f63bc582b3b8030fd8c80b70fb5fa5b834e575c2", size = 1232140, upload-time = "2026-03-12T03:40:28.882Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/e36e27c9cebc1311b7579210df6f1c86e50f2d7143ae4fcf8a5017dc8809/black-26.3.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2d6bfaf7fd0993b420bed691f20f9492d53ce9a2bcccea4b797d34e947318a78", size = 1889234, upload-time = "2026-03-12T03:40:30.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/7b/9871acf393f64a5fa33668c19350ca87177b181f44bb3d0c33b2d534f22c/black-26.3.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f89f2ab047c76a9c03f78d0d66ca519e389519902fa27e7a91117ef7611c0568", size = 1720522, upload-time = "2026-03-12T03:40:32.346Z" }, + { url = "https://files.pythonhosted.org/packages/03/87/e766c7f2e90c07fb7586cc787c9ae6462b1eedab390191f2b7fc7f6170a9/black-26.3.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b07fc0dab849d24a80a29cfab8d8a19187d1c4685d8a5e6385a5ce323c1f015f", size = 1787824, upload-time = "2026-03-12T03:40:33.636Z" }, + { url = "https://files.pythonhosted.org/packages/ac/94/2424338fb2d1875e9e83eed4c8e9c67f6905ec25afd826a911aea2b02535/black-26.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:0126ae5b7c09957da2bdbd91a9ba1207453feada9e9fe51992848658c6c8e01c", size = 1445855, upload-time = "2026-03-12T03:40:35.442Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/43/0c3338bd928afb8ee7471f1a4eec3bdbe2245ccb4a646092a222e8669840/black-26.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:92c0ec1f2cc149551a2b7b47efc32c866406b6891b0ee4625e95967c8f4acfb1", size = 1258109, upload-time = "2026-03-12T03:40:36.832Z" }, + { url = "https://files.pythonhosted.org/packages/8e/0d/52d98722666d6fc6c3dd4c76df339501d6efd40e0ff95e6186a7b7f0befd/black-26.3.1-py3-none-any.whl", hash = "sha256:2bd5aa94fc267d38bb21a70d7410a89f1a1d318841855f698746f8e7f51acd1b", size = 207542, upload-time = "2026-03-12T03:36:01.668Z" }, +] + [[package]] name = "cachecontrol" version = "0.14.2" @@ -165,6 +221,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767, upload-time = "2024-12-24T18:12:32.852Z" }, ] +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + [[package]] name = "colorama" version = "0.4.6" @@ -186,6 +254,26 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/ec/bb273b7208c606890dc36540fe667d06ce840a6f62f9fae7e658fcdc90fb/cssutils-2.11.1-py3-none-any.whl", hash = 
"sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1", size = 385747, upload-time = "2024-06-04T15:51:37.499Z" }, ] +[[package]] +name = "datamodel-code-generator" +version = "0.55.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "argcomplete" }, + { name = "black" }, + { name = "genson" }, + { name = "inflect" }, + { name = "isort" }, + { name = "jinja2" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tomli", marker = "python_full_version < '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/33/36/ec505ce62c143c0f045e82e2bb0360e2ede765c0cfe3a70bf32c5661b8a2/datamodel_code_generator-0.55.0.tar.gz", hash = "sha256:20ae7a4fbbb12be380f0bd02544db4abae96c5b644d4b3f2b9c3fc0bc9ee1184", size = 833828, upload-time = "2026-03-10T20:41:15.796Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/c6/2abc9d11adbbf689b6b4dfb7a136d57b9ccaa3b3f1ba83504462109e8dbb/datamodel_code_generator-0.55.0-py3-none-any.whl", hash = "sha256:efa5a925288ca2a135fdc3361c7d774ae5b24b4fd632868363e249d55ea2f137", size = 256860, upload-time = "2026-03-10T20:41:13.488Z" }, +] + [[package]] name = "dict2css" version = "0.3.0.post1" @@ -275,15 +363,12 @@ wheels = [ ] [[package]] -name = "grpc-stubs" -version = "1.53.0.5" +name = "genson" +version = "1.3.0" source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "grpcio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/55/8d/14c6b8c2fa5d82ffe96aed53b1c38e2a9fb6a57c5836966545f3080e5adc/grpc-stubs-1.53.0.5.tar.gz", hash = "sha256:3e1b642775cbc3e0c6332cfcedfccb022176db87e518757bef3a1241397be406", size = 14259, upload-time = "2023-12-28T02:13:29.19Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919, upload-time = 
"2024-05-15T22:08:49.123Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/86/147d2ccaf9b4b81407734b9abc1152aff39836e8e05be3bf069f9374c021/grpc_stubs-1.53.0.5-py3-none-any.whl", hash = "sha256:04183fb65a1b166a1febb9627e3d9647d3926ccc2dfe049fe7b6af243428dbe1", size = 16497, upload-time = "2023-12-28T02:13:27.556Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470, upload-time = "2024-05-15T22:08:47.056Z" }, ] [[package]] @@ -361,6 +446,37 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/27/e2/b419a02b50240143605f77cd50cb07f724caf0fd35a01540a4f044ae9f21/grpcio_tools-1.69.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9bae733654e0eb8ca83aa1d0d6b6c2f4a3525ce70d5ffc07df68d28f6520137", size = 1113616, upload-time = "2025-01-05T05:52:13.089Z" }, ] +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "h2" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "hpack" }, + { name = "hyperframe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/17/afa56379f94ad0fe8defd37d6eb3f89a25404ffc71d4d848893d270325fc/h2-4.3.0.tar.gz", hash = 
"sha256:6c59efe4323fa18b47a632221a1888bd7fde6249819beda254aeca909f221bf1", size = 2152026, upload-time = "2025-08-23T18:12:19.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/b2/119f6e6dcbd96f9069ce9a2665e0146588dc9f88f29549711853645e736a/h2-4.3.0-py3-none-any.whl", hash = "sha256:c438f029a25f7945c69e0ccf0fb951dc3f73a5f6412981daee861431b70e2bdd", size = 61779, upload-time = "2025-08-23T18:12:17.779Z" }, +] + +[[package]] +name = "hpack" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/48/71de9ed269fdae9c8057e5a4c0aa7402e8bb16f2c6e90b3aa53327b113f8/hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca", size = 51276, upload-time = "2025-01-22T21:44:58.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/c6/80c95b1b2b94682a72cbdbfb85b81ae2daffa4291fbfa1b1464502ede10d/hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496", size = 34357, upload-time = "2025-01-22T21:44:56.92Z" }, +] + [[package]] name = "html5lib" version = "1.1" @@ -374,6 +490,48 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173, upload-time = "2020-06-22T23:32:36.781Z" }, ] +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[package.optional-dependencies] +http2 = [ + { name = "h2" }, +] + +[[package]] +name = "hyperframe" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/e7/94f8232d4a74cc99514c13a9f995811485a6903d48e5d952771ef6322e30/hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08", size = 26566, upload-time = "2025-01-22T21:41:49.302Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/30/47d0bf6072f7252e6521f3447ccfa40b421b6824517f82854703d0f5a98b/hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5", size = 13007, upload-time = "2025-01-22T21:41:47.295Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -392,6 +550,19 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, ] +[[package]] +name = "inflect" +version = "7.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, + { name = "typeguard" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, +] + [[package]] name = "iniconfig" version = "2.1.0" @@ -401,6 +572,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, ] +[[package]] +name = "isort" +version = "8.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/7c/ec4ab396d31b3b395e2e999c8f46dec78c5e29209fac49d1f4dace04041d/isort-8.0.1.tar.gz", hash = "sha256:171ac4ff559cdc060bcfff550bc8404a486fee0caab245679c2abe7cb253c78d", size = 769592, upload-time = "2026-02-28T10:08:20.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/95/c7c34aa53c16353c56d0b802fba48d5f5caa2cdee7958acbcb795c830416/isort-8.0.1-py3-none-any.whl", hash = 
"sha256:28b89bc70f751b559aeca209e6120393d43fbe2490de0559662be7a9787e3d75", size = 89733, upload-time = "2026-02-28T10:08:19.466Z" }, +] + [[package]] name = "jinja2" version = "3.1.5" @@ -628,6 +808,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/aa/18/a8444036c6dd65ba3624c63b734d3ba95ba63ace513078e1580590075d21/pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364", size = 5955, upload-time = "2020-09-16T19:21:11.409Z" }, ] +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + [[package]] name = "platformdirs" version = "4.3.6" @@ -673,6 +862,118 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/b2/ab07b09e0f6d143dfb839693aa05765257bceaa13d03bf1a696b78323e7a/protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f", size = 172550, upload-time = "2025-01-08T21:38:50.439Z" }, ] +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = 
"sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = "https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = "2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = 
"2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = "https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = 
"2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = 
"2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = 
"2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 
2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + [[package]] name = "pygments" version = "2.19.1" @@ -735,6 +1036,40 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, ] +[[package]] +name = "pytokens" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, + { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, + { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = 
"2026-01-30T01:03:04.143Z" }, + { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, + { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, + { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, + { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, + { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, + { url = "https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, + { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = "2026-01-30T01:03:17.458Z" }, + { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, + { url = "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, + { url = "https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, + { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, + { 
url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, + { url = "https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, + { url = "https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, + { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -857,6 +1192,73 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/94/0498cdb7316ed67a1928300dd87d659c933479f44dec51b4f62bfd1f8028/ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7", size = 9145708, upload-time = "2025-01-10T18:57:51.308Z" }, ] +[[package]] +name = "s2-sdk" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "anyio" }, + { name = "httpx", extra = ["http2"] }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "zstandard" }, +] + +[package.dev-dependencies] +dev = [ + { name = "datamodel-code-generator" }, + { name = "grpcio-tools" }, + { name = "mypy" }, + { name = "poethepoet" }, + { name = "ruff" }, + { name = "types-protobuf" }, +] +docs = [ + { name = "enum-tools", extra = ["sphinx"] }, + { name = "furo" }, + { name = "myst-parser" }, + { name = "sphinx" }, + { name = "sphinx-autodoc-typehints" }, +] +test = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-timeout" }, + { name = "pytest-xdist" }, +] + +[package.metadata] +requires-dist = [ + { name = "anyio", specifier = ">=4.8.0" }, + { name = "httpx", extras = ["http2"], specifier = ">=0.28.0" }, + { name = "protobuf", specifier = ">=5.29.0" }, + { name = "pydantic", specifier = ">=2.0" }, + { name = "zstandard", specifier = ">=0.23.0" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "datamodel-code-generator", specifier = ">=0.28.0" }, + { name = "grpcio-tools", specifier = ">=1.69.0" }, + { name = "mypy", specifier = ">=1.14.1" }, + { name = "poethepoet", specifier = ">=0.36.0" }, + { name = "ruff", specifier = ">=0.9.1" }, + { name = "types-protobuf", specifier = ">=5.29.1.20241207" }, +] +docs = [ + { name = "enum-tools", extras = ["sphinx"], specifier = ">=0.12.0" }, + { name = "furo", specifier = ">=2024.8.6" }, + { name = "myst-parser", specifier = ">=4.0.0" }, + { name = "sphinx", specifier = "==8.1.3" }, + { name = "sphinx-autodoc-typehints", specifier = ">=3.0.0" }, +] +test = [ + { name = "pytest", specifier = ">=8.0.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.0" }, + { name = "pytest-timeout", specifier = ">=2.3.0" }, + { name = "pytest-xdist", specifier = ">=3.5.0" }, +] + [[package]] name = "setuptools" version = "75.8.0" @@ -1089,67 +1491,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/fb/d0/9852f70eb01f814843530c053542b72d30e9fbf74da7abb0107e71938389/standard_imghdr-3.10.14-py3-none-any.whl", hash = "sha256:cdf6883163349624dee9a81d2853a20260337c4cd41c04e99c082e01833a08e2", size = 5598, upload-time = "2024-04-21T18:54:48.587Z" }, ] -[[package]] -name = "streamstore" -version = "5.0.0" -source = { editable = "." } -dependencies = [ - { name = "anyio" }, - { name = "grpc-stubs" }, - { name = "grpcio" }, - { name = "grpcio-tools" }, - { name = "types-protobuf" }, -] - -[package.dev-dependencies] -dev = [ - { name = "mypy" }, - { name = "poethepoet" }, - { name = "ruff" }, -] -docs = [ - { name = "enum-tools", extra = ["sphinx"] }, - { name = "furo" }, - { name = "myst-parser" }, - { name = "sphinx" }, - { name = "sphinx-autodoc-typehints" }, -] -test = [ - { name = "pytest" }, - { name = "pytest-asyncio" }, - { name = "pytest-timeout" }, - { name = "pytest-xdist" }, -] - -[package.metadata] -requires-dist = [ - { name = "anyio", specifier = ">=4.8.0" }, - { name = "grpc-stubs", specifier = ">=1.53.0.5" }, - { name = "grpcio", specifier = ">=1.69.0" }, - { name = "grpcio-tools", specifier = ">=1.69.0" }, - { name = "types-protobuf", specifier = ">=5.29.1.20241207" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "mypy", specifier = ">=1.14.1" }, - { name = "poethepoet", specifier = ">=0.36.0" }, - { name = "ruff", specifier = ">=0.9.1" }, -] -docs = [ - { name = "enum-tools", extras = ["sphinx"], specifier = ">=0.12.0" }, - { name = "furo", specifier = ">=2024.8.6" }, - { name = "myst-parser", specifier = ">=4.0.0" }, - { name = "sphinx", specifier = "==8.1.3" }, - { name = "sphinx-autodoc-typehints", specifier = ">=3.0.0" }, -] -test = [ - { name = "pytest", specifier = ">=8.0.0" }, - { name = "pytest-asyncio", specifier = ">=0.23.0" }, - { name = "pytest-timeout", specifier = ">=2.3.0" }, - { name = "pytest-xdist", specifier = ">=3.5.0" }, -] - [[package]] name = "tabulate" version = "0.9.0" @@ 
-1159,22 +1500,100 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" }, ] +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 
243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", 
size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, 
+ { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, +] + +[[package]] +name = "typeguard" +version = "4.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2b/e8/66e25efcc18542d58706ce4e50415710593721aae26e794ab1dec34fb66f/typeguard-4.5.1.tar.gz", hash = "sha256:f6f8ecbbc819c9bc749983cc67c02391e16a9b43b8b27f15dc70ed7c4a007274", size = 80121, upload-time = "2026-02-19T16:09:03.392Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/91/88/b55b3117287a8540b76dbdd87733808d4d01c8067a3b339408c250bb3600/typeguard-4.5.1-py3-none-any.whl", hash = "sha256:44d2bf329d49a244110a090b55f5f91aa82d9a9834ebfd30bcc73651e4a8cc40", size = 36745, upload-time = "2026-02-19T16:09:01.6Z" }, +] + [[package]] name = "types-protobuf" -version = "5.29.1.20241207" +version = "6.32.1.20260221" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/89/b661a447139f665ccea8e39bfdd52a92f803df4b5de0e6001a3537feaacb/types_protobuf-5.29.1.20241207.tar.gz", hash = "sha256:2ebcadb8ab3ef2e3e2f067e0882906d64ba0dc65fc5b0fd7a8b692315b4a0be9", size = 59190, upload-time = "2024-12-07T02:54:37.951Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/e2/9aa4a3b2469508bd7b4e2ae11cbedaf419222a09a1b94daffcd5efca4023/types_protobuf-6.32.1.20260221.tar.gz", hash = "sha256:6d5fb060a616bfb076cbb61b4b3c3969f5fc8bec5810f9a2f7e648ee5cbcbf6e", size = 64408, upload-time = "2026-02-21T03:55:13.916Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/6e/cdf152187019d6f6d04066b23e48659d961b527e9c6d43b48459d160e332/types_protobuf-5.29.1.20241207-py3-none-any.whl", hash = "sha256:92893c42083e9b718c678badc0af7a9a1307b92afe1599e5cba5f3d35b668b2f", size = 73902, upload-time = "2024-12-07T02:54:36.069Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e8/1fd38926f9cf031188fbc5a96694203ea6f24b0e34bd64a225ec6f6291ba/types_protobuf-6.32.1.20260221-py3-none-any.whl", hash = "sha256:da7cdd947975964a93c30bfbcc2c6841ee646b318d3816b033adc2c4eb6448e4", size = 77956, upload-time = "2026-02-21T03:55:12.894Z" }, ] [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload-time = "2024-06-07T18:52:15.995Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] @@ -1194,3 +1613,77 @@ sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda5308 wheels = [ { url = 
"https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, ] + +[[package]] +name = "zstandard" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/aa/3e0508d5a5dd96529cdc5a97011299056e14c6505b678fd58938792794b1/zstandard-0.25.0.tar.gz", hash = "sha256:7713e1179d162cf5c7906da876ec2ccb9c3a9dcbdffef0cc7f70c3667a205f0b", size = 711513, upload-time = "2025-09-14T22:15:54.002Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/83/c3ca27c363d104980f1c9cee1101cc8ba724ac8c28a033ede6aab89585b1/zstandard-0.25.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:933b65d7680ea337180733cf9e87293cc5500cc0eb3fc8769f4d3c88d724ec5c", size = 795254, upload-time = "2025-09-14T22:16:26.137Z" }, + { url = "https://files.pythonhosted.org/packages/ac/4d/e66465c5411a7cf4866aeadc7d108081d8ceba9bc7abe6b14aa21c671ec3/zstandard-0.25.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3f79487c687b1fc69f19e487cd949bf3aae653d181dfb5fde3bf6d18894706f", size = 640559, upload-time = "2025-09-14T22:16:27.973Z" }, + { url = "https://files.pythonhosted.org/packages/12/56/354fe655905f290d3b147b33fe946b0f27e791e4b50a5f004c802cb3eb7b/zstandard-0.25.0-cp311-cp311-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:0bbc9a0c65ce0eea3c34a691e3c4b6889f5f3909ba4822ab385fab9057099431", size = 5348020, upload-time = "2025-09-14T22:16:29.523Z" }, + { url = "https://files.pythonhosted.org/packages/3b/13/2b7ed68bd85e69a2069bcc72141d378f22cae5a0f3b353a2c8f50ef30c1b/zstandard-0.25.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:01582723b3ccd6939ab7b3a78622c573799d5d8737b534b86d0e06ac18dbde4a", size = 5058126, 
upload-time = "2025-09-14T22:16:31.811Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/fdaf0674f4b10d92cb120ccff58bbb6626bf8368f00ebfd2a41ba4a0dc99/zstandard-0.25.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5f1ad7bf88535edcf30038f6919abe087f606f62c00a87d7e33e7fc57cb69fcc", size = 5405390, upload-time = "2025-09-14T22:16:33.486Z" }, + { url = "https://files.pythonhosted.org/packages/0f/67/354d1555575bc2490435f90d67ca4dd65238ff2f119f30f72d5cde09c2ad/zstandard-0.25.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:06acb75eebeedb77b69048031282737717a63e71e4ae3f77cc0c3b9508320df6", size = 5452914, upload-time = "2025-09-14T22:16:35.277Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/e9cfd801a3f9190bf3e759c422bbfd2247db9d7f3d54a56ecde70137791a/zstandard-0.25.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9300d02ea7c6506f00e627e287e0492a5eb0371ec1670ae852fefffa6164b072", size = 5559635, upload-time = "2025-09-14T22:16:37.141Z" }, + { url = "https://files.pythonhosted.org/packages/21/88/5ba550f797ca953a52d708c8e4f380959e7e3280af029e38fbf47b55916e/zstandard-0.25.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfd06b1c5584b657a2892a6014c2f4c20e0db0208c159148fa78c65f7e0b0277", size = 5048277, upload-time = "2025-09-14T22:16:38.807Z" }, + { url = "https://files.pythonhosted.org/packages/46/c0/ca3e533b4fa03112facbe7fbe7779cb1ebec215688e5df576fe5429172e0/zstandard-0.25.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f373da2c1757bb7f1acaf09369cdc1d51d84131e50d5fa9863982fd626466313", size = 5574377, upload-time = "2025-09-14T22:16:40.523Z" }, + { url = "https://files.pythonhosted.org/packages/12/9b/3fb626390113f272abd0799fd677ea33d5fc3ec185e62e6be534493c4b60/zstandard-0.25.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6c0e5a65158a7946e7a7affa6418878ef97ab66636f13353b8502d7ea03c8097", size = 4961493, upload-time = 
"2025-09-14T22:16:43.3Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d3/23094a6b6a4b1343b27ae68249daa17ae0651fcfec9ed4de09d14b940285/zstandard-0.25.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c8e167d5adf59476fa3e37bee730890e389410c354771a62e3c076c86f9f7778", size = 5269018, upload-time = "2025-09-14T22:16:45.292Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a7/bb5a0c1c0f3f4b5e9d5b55198e39de91e04ba7c205cc46fcb0f95f0383c1/zstandard-0.25.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:98750a309eb2f020da61e727de7d7ba3c57c97cf6213f6f6277bb7fb42a8e065", size = 5443672, upload-time = "2025-09-14T22:16:47.076Z" }, + { url = "https://files.pythonhosted.org/packages/27/22/503347aa08d073993f25109c36c8d9f029c7d5949198050962cb568dfa5e/zstandard-0.25.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:22a086cff1b6ceca18a8dd6096ec631e430e93a8e70a9ca5efa7561a00f826fa", size = 5822753, upload-time = "2025-09-14T22:16:49.316Z" }, + { url = "https://files.pythonhosted.org/packages/e2/be/94267dc6ee64f0f8ba2b2ae7c7a2df934a816baaa7291db9e1aa77394c3c/zstandard-0.25.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:72d35d7aa0bba323965da807a462b0966c91608ef3a48ba761678cb20ce5d8b7", size = 5366047, upload-time = "2025-09-14T22:16:51.328Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a3/732893eab0a3a7aecff8b99052fecf9f605cf0fb5fb6d0290e36beee47a4/zstandard-0.25.0-cp311-cp311-win32.whl", hash = "sha256:f5aeea11ded7320a84dcdd62a3d95b5186834224a9e55b92ccae35d21a8b63d4", size = 436484, upload-time = "2025-09-14T22:16:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/43/a3/c6155f5c1cce691cb80dfd38627046e50af3ee9ddc5d0b45b9b063bfb8c9/zstandard-0.25.0-cp311-cp311-win_amd64.whl", hash = "sha256:daab68faadb847063d0c56f361a289c4f268706b598afbf9ad113cbe5c38b6b2", size = 506183, upload-time = "2025-09-14T22:16:52.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/3e/8945ab86a0820cc0e0cdbf38086a92868a9172020fdab8a03ac19662b0e5/zstandard-0.25.0-cp311-cp311-win_arm64.whl", hash = "sha256:22a06c5df3751bb7dc67406f5374734ccee8ed37fc5981bf1ad7041831fa1137", size = 462533, upload-time = "2025-09-14T22:16:53.878Z" }, + { url = "https://files.pythonhosted.org/packages/82/fc/f26eb6ef91ae723a03e16eddb198abcfce2bc5a42e224d44cc8b6765e57e/zstandard-0.25.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7b3c3a3ab9daa3eed242d6ecceead93aebbb8f5f84318d82cee643e019c4b73b", size = 795738, upload-time = "2025-09-14T22:16:56.237Z" }, + { url = "https://files.pythonhosted.org/packages/aa/1c/d920d64b22f8dd028a8b90e2d756e431a5d86194caa78e3819c7bf53b4b3/zstandard-0.25.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:913cbd31a400febff93b564a23e17c3ed2d56c064006f54efec210d586171c00", size = 640436, upload-time = "2025-09-14T22:16:57.774Z" }, + { url = "https://files.pythonhosted.org/packages/53/6c/288c3f0bd9fcfe9ca41e2c2fbfd17b2097f6af57b62a81161941f09afa76/zstandard-0.25.0-cp312-cp312-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:011d388c76b11a0c165374ce660ce2c8efa8e5d87f34996aa80f9c0816698b64", size = 5343019, upload-time = "2025-09-14T22:16:59.302Z" }, + { url = "https://files.pythonhosted.org/packages/1e/15/efef5a2f204a64bdb5571e6161d49f7ef0fffdbca953a615efbec045f60f/zstandard-0.25.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6dffecc361d079bb48d7caef5d673c88c8988d3d33fb74ab95b7ee6da42652ea", size = 5063012, upload-time = "2025-09-14T22:17:01.156Z" }, + { url = "https://files.pythonhosted.org/packages/b7/37/a6ce629ffdb43959e92e87ebdaeebb5ac81c944b6a75c9c47e300f85abdf/zstandard-0.25.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:7149623bba7fdf7e7f24312953bcf73cae103db8cae49f8154dd1eadc8a29ecb", size = 5394148, upload-time = "2025-09-14T22:17:03.091Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/79/2bf870b3abeb5c070fe2d670a5a8d1057a8270f125ef7676d29ea900f496/zstandard-0.25.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:6a573a35693e03cf1d67799fd01b50ff578515a8aeadd4595d2a7fa9f3ec002a", size = 5451652, upload-time = "2025-09-14T22:17:04.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/60/7be26e610767316c028a2cbedb9a3beabdbe33e2182c373f71a1c0b88f36/zstandard-0.25.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5a56ba0db2d244117ed744dfa8f6f5b366e14148e00de44723413b2f3938a902", size = 5546993, upload-time = "2025-09-14T22:17:06.781Z" }, + { url = "https://files.pythonhosted.org/packages/85/c7/3483ad9ff0662623f3648479b0380d2de5510abf00990468c286c6b04017/zstandard-0.25.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:10ef2a79ab8e2974e2075fb984e5b9806c64134810fac21576f0668e7ea19f8f", size = 5046806, upload-time = "2025-09-14T22:17:08.415Z" }, + { url = "https://files.pythonhosted.org/packages/08/b3/206883dd25b8d1591a1caa44b54c2aad84badccf2f1de9e2d60a446f9a25/zstandard-0.25.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aaf21ba8fb76d102b696781bddaa0954b782536446083ae3fdaa6f16b25a1c4b", size = 5576659, upload-time = "2025-09-14T22:17:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/9d/31/76c0779101453e6c117b0ff22565865c54f48f8bd807df2b00c2c404b8e0/zstandard-0.25.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1869da9571d5e94a85a5e8d57e4e8807b175c9e4a6294e3b66fa4efb074d90f6", size = 4953933, upload-time = "2025-09-14T22:17:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/18/e1/97680c664a1bf9a247a280a053d98e251424af51f1b196c6d52f117c9720/zstandard-0.25.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:809c5bcb2c67cd0ed81e9229d227d4ca28f82d0f778fc5fea624a9def3963f91", size = 5268008, upload-time = "2025-09-14T22:17:13.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/73/316e4010de585ac798e154e88fd81bb16afc5c5cb1a72eeb16dd37e8024a/zstandard-0.25.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f27662e4f7dbf9f9c12391cb37b4c4c3cb90ffbd3b1fb9284dadbbb8935fa708", size = 5433517, upload-time = "2025-09-14T22:17:16.103Z" }, + { url = "https://files.pythonhosted.org/packages/5b/60/dd0f8cfa8129c5a0ce3ea6b7f70be5b33d2618013a161e1ff26c2b39787c/zstandard-0.25.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99c0c846e6e61718715a3c9437ccc625de26593fea60189567f0118dc9db7512", size = 5814292, upload-time = "2025-09-14T22:17:17.827Z" }, + { url = "https://files.pythonhosted.org/packages/fc/5f/75aafd4b9d11b5407b641b8e41a57864097663699f23e9ad4dbb91dc6bfe/zstandard-0.25.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:474d2596a2dbc241a556e965fb76002c1ce655445e4e3bf38e5477d413165ffa", size = 5360237, upload-time = "2025-09-14T22:17:19.954Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/0309daffea4fcac7981021dbf21cdb2e3427a9e76bafbcdbdf5392ff99a4/zstandard-0.25.0-cp312-cp312-win32.whl", hash = "sha256:23ebc8f17a03133b4426bcc04aabd68f8236eb78c3760f12783385171b0fd8bd", size = 436922, upload-time = "2025-09-14T22:17:24.398Z" }, + { url = "https://files.pythonhosted.org/packages/79/3b/fa54d9015f945330510cb5d0b0501e8253c127cca7ebe8ba46a965df18c5/zstandard-0.25.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffef5a74088f1e09947aecf91011136665152e0b4b359c42be3373897fb39b01", size = 506276, upload-time = "2025-09-14T22:17:21.429Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6b/8b51697e5319b1f9ac71087b0af9a40d8a6288ff8025c36486e0c12abcc4/zstandard-0.25.0-cp312-cp312-win_arm64.whl", hash = "sha256:181eb40e0b6a29b3cd2849f825e0fa34397f649170673d385f3598ae17cca2e9", size = 462679, upload-time = "2025-09-14T22:17:23.147Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/0b/8df9c4ad06af91d39e94fa96cc010a24ac4ef1378d3efab9223cc8593d40/zstandard-0.25.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ec996f12524f88e151c339688c3897194821d7f03081ab35d31d1e12ec975e94", size = 795735, upload-time = "2025-09-14T22:17:26.042Z" }, + { url = "https://files.pythonhosted.org/packages/3f/06/9ae96a3e5dcfd119377ba33d4c42a7d89da1efabd5cb3e366b156c45ff4d/zstandard-0.25.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a1a4ae2dec3993a32247995bdfe367fc3266da832d82f8438c8570f989753de1", size = 640440, upload-time = "2025-09-14T22:17:27.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/14/933d27204c2bd404229c69f445862454dcc101cd69ef8c6068f15aaec12c/zstandard-0.25.0-cp313-cp313-manylinux2010_i686.manylinux2014_i686.manylinux_2_12_i686.manylinux_2_17_i686.whl", hash = "sha256:e96594a5537722fdfb79951672a2a63aec5ebfb823e7560586f7484819f2a08f", size = 5343070, upload-time = "2025-09-14T22:17:28.896Z" }, + { url = "https://files.pythonhosted.org/packages/6d/db/ddb11011826ed7db9d0e485d13df79b58586bfdec56e5c84a928a9a78c1c/zstandard-0.25.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bfc4e20784722098822e3eee42b8e576b379ed72cca4a7cb856ae733e62192ea", size = 5063001, upload-time = "2025-09-14T22:17:31.044Z" }, + { url = "https://files.pythonhosted.org/packages/db/00/87466ea3f99599d02a5238498b87bf84a6348290c19571051839ca943777/zstandard-0.25.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:457ed498fc58cdc12fc48f7950e02740d4f7ae9493dd4ab2168a47c93c31298e", size = 5394120, upload-time = "2025-09-14T22:17:32.711Z" }, + { url = "https://files.pythonhosted.org/packages/2b/95/fc5531d9c618a679a20ff6c29e2b3ef1d1f4ad66c5e161ae6ff847d102a9/zstandard-0.25.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:fd7a5004eb1980d3cefe26b2685bcb0b17989901a70a1040d1ac86f1d898c551", size = 5451230, upload-time = 
"2025-09-14T22:17:34.41Z" }, + { url = "https://files.pythonhosted.org/packages/63/4b/e3678b4e776db00f9f7b2fe58e547e8928ef32727d7a1ff01dea010f3f13/zstandard-0.25.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e735494da3db08694d26480f1493ad2cf86e99bdd53e8e9771b2752a5c0246a", size = 5547173, upload-time = "2025-09-14T22:17:36.084Z" }, + { url = "https://files.pythonhosted.org/packages/4e/d5/ba05ed95c6b8ec30bd468dfeab20589f2cf709b5c940483e31d991f2ca58/zstandard-0.25.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3a39c94ad7866160a4a46d772e43311a743c316942037671beb264e395bdd611", size = 5046736, upload-time = "2025-09-14T22:17:37.891Z" }, + { url = "https://files.pythonhosted.org/packages/50/d5/870aa06b3a76c73eced65c044b92286a3c4e00554005ff51962deef28e28/zstandard-0.25.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:172de1f06947577d3a3005416977cce6168f2261284c02080e7ad0185faeced3", size = 5576368, upload-time = "2025-09-14T22:17:40.206Z" }, + { url = "https://files.pythonhosted.org/packages/5d/35/398dc2ffc89d304d59bc12f0fdd931b4ce455bddf7038a0a67733a25f550/zstandard-0.25.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c83b0188c852a47cd13ef3bf9209fb0a77fa5374958b8c53aaa699398c6bd7b", size = 4954022, upload-time = "2025-09-14T22:17:41.879Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5c/36ba1e5507d56d2213202ec2b05e8541734af5f2ce378c5d1ceaf4d88dc4/zstandard-0.25.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1673b7199bbe763365b81a4f3252b8e80f44c9e323fc42940dc8843bfeaf9851", size = 5267889, upload-time = "2025-09-14T22:17:43.577Z" }, + { url = "https://files.pythonhosted.org/packages/70/e8/2ec6b6fb7358b2ec0113ae202647ca7c0e9d15b61c005ae5225ad0995df5/zstandard-0.25.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0be7622c37c183406f3dbf0cba104118eb16a4ea7359eeb5752f0794882fc250", size = 5433952, upload-time = "2025-09-14T22:17:45.271Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/01/b5f4d4dbc59ef193e870495c6f1275f5b2928e01ff5a81fecb22a06e22fb/zstandard-0.25.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:5f5e4c2a23ca271c218ac025bd7d635597048b366d6f31f420aaeb715239fc98", size = 5814054, upload-time = "2025-09-14T22:17:47.08Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/fbd822d5c6f427cf158316d012c5a12f233473c2f9c5fe5ab1ae5d21f3d8/zstandard-0.25.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f187a0bb61b35119d1926aee039524d1f93aaf38a9916b8c4b78ac8514a0aaf", size = 5360113, upload-time = "2025-09-14T22:17:48.893Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e0/69a553d2047f9a2c7347caa225bb3a63b6d7704ad74610cb7823baa08ed7/zstandard-0.25.0-cp313-cp313-win32.whl", hash = "sha256:7030defa83eef3e51ff26f0b7bfb229f0204b66fe18e04359ce3474ac33cbc09", size = 436936, upload-time = "2025-09-14T22:17:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/d9/82/b9c06c870f3bd8767c201f1edbdf9e8dc34be5b0fbc5682c4f80fe948475/zstandard-0.25.0-cp313-cp313-win_amd64.whl", hash = "sha256:1f830a0dac88719af0ae43b8b2d6aef487d437036468ef3c2ea59c51f9d55fd5", size = 506232, upload-time = "2025-09-14T22:17:50.402Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/60c3c01243bb81d381c9916e2a6d9e149ab8627c0c7d7abb2d73384b3c0c/zstandard-0.25.0-cp313-cp313-win_arm64.whl", hash = "sha256:85304a43f4d513f5464ceb938aa02c1e78c2943b29f44a750b48b25ac999a049", size = 462671, upload-time = "2025-09-14T22:17:51.533Z" }, + { url = "https://files.pythonhosted.org/packages/3d/5c/f8923b595b55fe49e30612987ad8bf053aef555c14f05bb659dd5dbe3e8a/zstandard-0.25.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e29f0cf06974c899b2c188ef7f783607dbef36da4c242eb6c82dcd8b512855e3", size = 795887, upload-time = "2025-09-14T22:17:54.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/8d/09/d0a2a14fc3439c5f874042dca72a79c70a532090b7ba0003be73fee37ae2/zstandard-0.25.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:05df5136bc5a011f33cd25bc9f506e7426c0c9b3f9954f056831ce68f3b6689f", size = 640658, upload-time = "2025-09-14T22:17:55.423Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7c/8b6b71b1ddd517f68ffb55e10834388d4f793c49c6b83effaaa05785b0b4/zstandard-0.25.0-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:f604efd28f239cc21b3adb53eb061e2a205dc164be408e553b41ba2ffe0ca15c", size = 5379849, upload-time = "2025-09-14T22:17:57.372Z" }, + { url = "https://files.pythonhosted.org/packages/a4/86/a48e56320d0a17189ab7a42645387334fba2200e904ee47fc5a26c1fd8ca/zstandard-0.25.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223415140608d0f0da010499eaa8ccdb9af210a543fac54bce15babbcfc78439", size = 5058095, upload-time = "2025-09-14T22:17:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ad/eb659984ee2c0a779f9d06dbfe45e2dc39d99ff40a319895df2d3d9a48e5/zstandard-0.25.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e54296a283f3ab5a26fc9b8b5d4978ea0532f37b231644f367aa588930aa043", size = 5551751, upload-time = "2025-09-14T22:18:01.618Z" }, + { url = "https://files.pythonhosted.org/packages/61/b3/b637faea43677eb7bd42ab204dfb7053bd5c4582bfe6b1baefa80ac0c47b/zstandard-0.25.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ca54090275939dc8ec5dea2d2afb400e0f83444b2fc24e07df7fdef677110859", size = 6364818, upload-time = "2025-09-14T22:18:03.769Z" }, + { url = "https://files.pythonhosted.org/packages/31/dc/cc50210e11e465c975462439a492516a73300ab8caa8f5e0902544fd748b/zstandard-0.25.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:e09bb6252b6476d8d56100e8147b803befa9a12cea144bbe629dd508800d1ad0", size = 5560402, upload-time = "2025-09-14T22:18:05.954Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ae/56523ae9c142f0c08efd5e868a6da613ae76614eca1305259c3bf6a0ed43/zstandard-0.25.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a9ec8c642d1ec73287ae3e726792dd86c96f5681eb8df274a757bf62b750eae7", size = 4955108, upload-time = "2025-09-14T22:18:07.68Z" }, + { url = "https://files.pythonhosted.org/packages/98/cf/c899f2d6df0840d5e384cf4c4121458c72802e8bda19691f3b16619f51e9/zstandard-0.25.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a4089a10e598eae6393756b036e0f419e8c1d60f44a831520f9af41c14216cf2", size = 5269248, upload-time = "2025-09-14T22:18:09.753Z" }, + { url = "https://files.pythonhosted.org/packages/1b/c0/59e912a531d91e1c192d3085fc0f6fb2852753c301a812d856d857ea03c6/zstandard-0.25.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f67e8f1a324a900e75b5e28ffb152bcac9fbed1cc7b43f99cd90f395c4375344", size = 5430330, upload-time = "2025-09-14T22:18:11.966Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/7e31db1240de2df22a58e2ea9a93fc6e38cc29353e660c0272b6735d6669/zstandard-0.25.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:9654dbc012d8b06fc3d19cc825af3f7bf8ae242226df5f83936cb39f5fdc846c", size = 5811123, upload-time = "2025-09-14T22:18:13.907Z" }, + { url = "https://files.pythonhosted.org/packages/f6/49/fac46df5ad353d50535e118d6983069df68ca5908d4d65b8c466150a4ff1/zstandard-0.25.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4203ce3b31aec23012d3a4cf4a2ed64d12fea5269c49aed5e4c3611b938e4088", size = 5359591, upload-time = "2025-09-14T22:18:16.465Z" }, + { url = "https://files.pythonhosted.org/packages/c2/38/f249a2050ad1eea0bb364046153942e34abba95dd5520af199aed86fbb49/zstandard-0.25.0-cp314-cp314-win32.whl", hash = "sha256:da469dc041701583e34de852d8634703550348d5822e66a0c827d39b05365b12", size = 444513, upload-time = 
"2025-09-14T22:18:20.61Z" }, + { url = "https://files.pythonhosted.org/packages/3a/43/241f9615bcf8ba8903b3f0432da069e857fc4fd1783bd26183db53c4804b/zstandard-0.25.0-cp314-cp314-win_amd64.whl", hash = "sha256:c19bcdd826e95671065f8692b5a4aa95c52dc7a02a4c5a0cac46deb879a017a2", size = 516118, upload-time = "2025-09-14T22:18:17.849Z" }, + { url = "https://files.pythonhosted.org/packages/f0/ef/da163ce2450ed4febf6467d77ccb4cd52c4c30ab45624bad26ca0a27260c/zstandard-0.25.0-cp314-cp314-win_arm64.whl", hash = "sha256:d7541afd73985c630bafcd6338d2518ae96060075f9463d7dc14cfb33514383d", size = 476940, upload-time = "2025-09-14T22:18:19.088Z" }, +]