diff --git a/.github/release-drafter-python.yml b/.github/release-drafter-python.yml new file mode 100644 index 00000000..fa7c4949 --- /dev/null +++ b/.github/release-drafter-python.yml @@ -0,0 +1,37 @@ +name-template: 'river-client-py/v$RESOLVED_VERSION' +tag-template: 'river-client-py/v$RESOLVED_VERSION' +filter-by-commitish: true +include-paths: + - 'python-client/' +categories: + - title: '🚀 Features' + labels: + - 'feature' + - 'enhancement' + - 'python' + - title: '🐛 Bug Fixes' + labels: + - 'fix' + - 'bugfix' + - 'bug' + - title: '🧰 Maintenance' + label: 'chore' + - title: '🤖 Dependencies' + label: 'dependencies' +change-template: '- $TITLE @$AUTHOR (#$NUMBER)' +change-title-escapes: '\<*_&' +version-resolver: + major: + labels: + - 'major' + minor: + labels: + - 'minor' + patch: + labels: + - 'patch' + default: patch +template: | + ## Changes + + $CHANGES diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d9e328a8..9641ced5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -52,3 +52,32 @@ jobs: name: Test Report (${{ matrix.os }}) path: ./test-results.xml reporter: java-junit + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install Python dependencies + working-directory: python-client + run: pip install -e ".[dev]" ty + + - name: Python lint + working-directory: python-client + run: | + ruff check . + ruff format --check . 
+ + - name: Python type check + working-directory: python-client + run: ty check river/ + + - name: Python tests + working-directory: python-client + run: python -m pytest tests/ -v + + - name: Python type check generated clients + working-directory: python-client + run: | + ty check tests/generated/ + ty check tests/test_codegen.py diff --git a/.github/workflows/publish-python.yml b/.github/workflows/publish-python.yml new file mode 100644 index 00000000..585e794c --- /dev/null +++ b/.github/workflows/publish-python.yml @@ -0,0 +1,47 @@ +name: Build and Publish Python Package + +on: + release: + types: [published] + +jobs: + build-and-publish: + # Only run for Python releases + if: startsWith(github.event.release.tag_name, 'river-client-py/') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v3 + with: + enable-cache: true + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Check if version already published + working-directory: python-client + id: check + run: | + version=$(python -c " + import tomllib + with open('pyproject.toml', 'rb') as f: + print(tomllib.load(f)['project']['version']) + ") + echo "version=$version" >> "$GITHUB_OUTPUT" + if uv pip install --dry-run "river-client==$version" 2>/dev/null; then + echo "skip=true" >> "$GITHUB_OUTPUT" + else + echo "skip=false" >> "$GITHUB_OUTPUT" + fi + + - name: Build and publish + if: steps.check.outputs.skip == 'false' + working-directory: python-client + run: | + uv build + UV_PUBLISH_TOKEN="${{ secrets.PYPI_TOKEN }}" \ + uv publish diff --git a/.github/workflows/release-drafter-python.yml b/.github/workflows/release-drafter-python.yml new file mode 100644 index 00000000..746dcca2 --- /dev/null +++ b/.github/workflows/release-drafter-python.yml @@ -0,0 +1,27 @@ +name: Release Drafter (Python) + +on: + workflow_dispatch: {} + push: + branches: + - main + pull_request: + types: [opened, reopened, 
synchronize] + pull_request_target: + types: [opened, reopened, synchronize] + +permissions: + contents: read + +jobs: + update_release_draft: + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: release-drafter/release-drafter@v5 + with: + config-name: release-drafter-python.yml + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.prettierignore b/.prettierignore index 9c247287..11d5e660 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,2 +1,8 @@ .cache node_modules +python-client/.venv +python-client/.pytest_cache +python-client/tests/*.mjs +python-client/tests/test_schema.json +python-client/tests/generated +.codex-review-tmp diff --git a/package.json b/package.json index f9510f34..ff12988c 100644 --- a/package.json +++ b/package.json @@ -73,7 +73,8 @@ "format:fix": "npx prettier . --write", "lint": "eslint .", "lint:fix": "eslint . --fix", - "fix": "npm run format:fix && npm run lint:fix", + "fix": "npm run format:fix && npm run lint:fix && npm run fix:python", + "fix:python": "cd python-client && ruff check --fix . 
&& ruff format .", "build": "rm -rf dist && tsup && du -sh dist", "prepack": "npm run build", "release": "npm publish --access public", diff --git a/python-client/.gitignore b/python-client/.gitignore new file mode 100644 index 00000000..bf5e9200 --- /dev/null +++ b/python-client/.gitignore @@ -0,0 +1,10 @@ +.venv/ +__pycache__/ +*.pyc +*.egg-info/ +.pytest_cache/ +dist/ +build/ +.coverage +# esbuild build artifacts (built from .ts at test time) +tests/*.mjs diff --git a/python-client/pyproject.toml b/python-client/pyproject.toml new file mode 100644 index 00000000..52ef09eb --- /dev/null +++ b/python-client/pyproject.toml @@ -0,0 +1,45 @@ +[build-system] +requires = ["setuptools>=68.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "river-client" +version = "0.1.0" +description = "Python client for River protocol v2.0" +requires-python = ">=3.10" +license = {text = "MIT"} +dependencies = [ + "websockets>=12.0", + "msgpack>=1.0", + "typing_extensions>=4.0", + "jinja2>=3.0", + "opentelemetry-api>=1.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0", + "pytest-asyncio>=0.23", + "ruff>=0.4", +] + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "session" +testpaths = ["tests"] + +[tool.ruff] +target-version = "py310" +exclude = ["tests/generated"] + +[tool.ruff.lint] +select = ["E", "F", "I", "W"] + +[tool.ty.environment] +extra-paths = ["tests"] + +[tool.setuptools.packages.find] +include = ["river*"] + +[tool.setuptools.package-data] +"river.codegen" = ["templates/*.j2"] diff --git a/python-client/river/__init__.py b/python-client/river/__init__.py new file mode 100644 index 00000000..34864460 --- /dev/null +++ b/python-client/river/__init__.py @@ -0,0 +1,35 @@ +"""River protocol v2.0 Python client implementation. + +This client was generated with the assistance of AI (Claude). 
+""" + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) +from river.codec import BinaryCodec +from river.session import SessionOptions +from river.streams import Readable, Writable +from river.transport import WebSocketClientTransport +from river.types import Err, Ok, TransportMessage + +__all__ = [ + "RiverClient", + "OkResult", + "ErrResult", + "StreamResult", + "UploadResult", + "SubscriptionResult", + "WebSocketClientTransport", + "BinaryCodec", + "SessionOptions", + "TransportMessage", + "Ok", + "Err", + "Readable", + "Writable", +] diff --git a/python-client/river/client.py b/python-client/river/client.py new file mode 100644 index 00000000..7098414d --- /dev/null +++ b/python-client/river/client.py @@ -0,0 +1,567 @@ +"""River client for invoking remote procedures. + +Provides the high-level API for calling rpc, stream, upload, and +subscription procedures on a River server. +""" + +from __future__ import annotations + +import asyncio +import logging +from collections.abc import Awaitable, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Generic, Literal, TypeVar + +from typing_extensions import TypedDict + +if TYPE_CHECKING: + from river.session import SessionOptions + +from river.session import SessionState +from river.streams import Readable, Writable +from river.transport import WebSocketClientTransport +from river.types import ( + CANCEL_CODE, + UNEXPECTED_DISCONNECT_CODE, + ControlFlags, + PartialTransportMessage, + TransportMessage, + cancel_message, + close_stream_message, + err_result, + generate_id, + is_stream_cancel, + is_stream_close, +) + +logger = logging.getLogger(__name__) + +T = TypeVar("T") +TOutput = TypeVar("TOutput") +TPayload = TypeVar("TPayload") + + +class OkResult(TypedDict, Generic[TPayload]): + """Successful result from a procedure call.""" + + ok: Literal[True] + payload: TPayload + + +class ErrResult(TypedDict, 
Generic[TPayload]): + """Error result from a procedure call.""" + + ok: Literal[False] + payload: TPayload + + +@dataclass +class StreamResult(Generic[T, TOutput]): + """Result of opening a stream procedure. + + Generic over the input type ``T`` written to ``req_writable`` + and the output type ``TOutput`` read from ``res_readable``. + """ + + req_writable: Writable[T] + res_readable: Readable[TOutput] + + +@dataclass +class UploadResult(Generic[T, TOutput]): + """Result of opening an upload procedure. + + Generic over the input type ``T`` written to ``req_writable`` + and the output type ``TOutput`` returned by ``finalize()``. + """ + + req_writable: Writable[T] + finalize: Callable[[], Awaitable[TOutput]] + + +@dataclass +class SubscriptionResult(Generic[T]): + """Result of opening a subscription procedure. + + Generic over the output type ``T`` received from ``res_readable``. + """ + + res_readable: Readable[T] + + +class RiverClient: + """Client for invoking procedures on a River server. + + Usage: + transport = WebSocketClientTransport("ws://localhost:8080", ...) 
+ client = RiverClient(transport, server_id="my-server") + + # RPC + result = await client.rpc("service", "procedure", {"arg": 1}) + + # Stream + stream = client.stream("service", "procedure", {"arg": 1}) + stream.req_writable.write({"data": "hello"}) + async for msg in stream.res_readable: + print(msg) + + # Upload + upload = client.upload("service", "procedure", {"arg": 1}) + upload.req_writable.write({"data": "chunk1"}) + upload.req_writable.close() + result = await upload.finalize() + + # Subscription + sub = client.subscribe("service", "procedure", {"arg": 1}) + async for msg in sub.res_readable: + print(msg) + """ + + def __init__( + self, + transport: WebSocketClientTransport, + server_id: str | None = None, + connect_on_invoke: bool = True, + eagerly_connect: bool = False, + ) -> None: + self._transport = transport + self._server_id = server_id or transport.server_id + self._connect_on_invoke = connect_on_invoke + + if eagerly_connect: + transport.connect(self._server_id) + + @classmethod + async def connect( + cls, + url: str, + *, + client_id: str | None = None, + server_id: str = "SERVER", + handshake_metadata: Any = None, + options: "SessionOptions | None" = None, + ) -> "RiverClient": + """Create a connected RiverClient. + + Convenience factory that creates a transport and eagerly connects. + """ + from river.session import SessionOptions as _SO + + transport = WebSocketClientTransport( + url, + client_id=client_id, + server_id=server_id, + handshake_metadata=handshake_metadata, + options=options or _SO(), + ) + client = cls(transport, server_id=server_id, eagerly_connect=True) + return client + + @property + def transport(self) -> WebSocketClientTransport: + return self._transport + + async def rpc( + self, + service_name: str, + procedure_name: str, + init: Any, + abort_signal: asyncio.Event | None = None, + ) -> Any: + """Invoke an RPC procedure. 
+ + Returns the result dict: {"ok": True/False, "payload": ...} + """ + result = self._handle_proc( + proc_type="rpc", + service_name=service_name, + procedure_name=procedure_name, + init=init, + abort_signal=abort_signal, + ) + # For RPC, we await the single response + readable = result["res_readable"] + done, value = await readable.next() + if done: + return err_result(UNEXPECTED_DISCONNECT_CODE, "No response received") + return value + + def stream( + self, + service_name: str, + procedure_name: str, + init: Any, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[Any, Any]: + """Open a stream procedure. + + Returns StreamResult with req_writable and res_readable. + """ + result = self._handle_proc( + proc_type="stream", + service_name=service_name, + procedure_name=procedure_name, + init=init, + abort_signal=abort_signal, + ) + return StreamResult( + req_writable=result["req_writable"], + res_readable=result["res_readable"], + ) + + def upload( + self, + service_name: str, + procedure_name: str, + init: Any, + abort_signal: asyncio.Event | None = None, + ) -> UploadResult[Any, Any]: + """Open an upload procedure. + + Returns UploadResult with req_writable and finalize(). + """ + result = self._handle_proc( + proc_type="upload", + service_name=service_name, + procedure_name=procedure_name, + init=init, + abort_signal=abort_signal, + ) + + async def finalize() -> dict[str, Any]: + writable = result["req_writable"] + if writable.is_writable(): + writable.close() + readable = result["res_readable"] + done, value = await readable.next() + if done: + return err_result(UNEXPECTED_DISCONNECT_CODE, "No response received") + return value + + return UploadResult( + req_writable=result["req_writable"], + finalize=finalize, + ) + + def subscribe( + self, + service_name: str, + procedure_name: str, + init: Any, + abort_signal: asyncio.Event | None = None, + ) -> SubscriptionResult[Any]: + """Open a subscription procedure. 
+ + Returns SubscriptionResult with res_readable. + """ + result = self._handle_proc( + proc_type="subscription", + service_name=service_name, + procedure_name=procedure_name, + init=init, + abort_signal=abort_signal, + ) + return SubscriptionResult(res_readable=result["res_readable"]) + + def _handle_proc( + self, + proc_type: str, + service_name: str, + procedure_name: str, + init: Any, + abort_signal: asyncio.Event | None = None, + ) -> dict[str, Any]: + """Core procedure dispatch logic. + + Sets up the stream, registers message handlers, sends the init message. + """ + to = self._server_id + transport = self._transport + + # If transport is closed, return immediate disconnect error + if transport.get_status() != "open": + res_readable = Readable() + res_readable._push_value( + err_result(UNEXPECTED_DISCONNECT_CODE, "transport is closed") + ) + res_readable._trigger_close() + req_writable = Writable(write_cb=lambda _: None, close_cb=None) + req_writable._closed = True + return { + "res_readable": res_readable, + "req_writable": req_writable, + } + + # Connect if needed + if self._connect_on_invoke: + transport.connect(to) + + # Get the session and a send function. + # If connect() couldn't start (retry budget exhausted, transport + # closing, etc.) the session will be in NO_CONNECTION with no + # connect task in flight — fail immediately instead of hanging. 
+ session = transport._get_or_create_session(to) + connect_task = transport._connect_tasks.get(to) + has_active_connect = connect_task is not None and not connect_task.done() + if session.state == SessionState.NO_CONNECTION and not has_active_connect: + transport._delete_session(to) + res_readable = Readable() + res_readable._push_value( + err_result( + UNEXPECTED_DISCONNECT_CODE, + f"{to} connection failed", + ) + ) + res_readable._trigger_close() + req_writable = Writable(write_cb=lambda _: None, close_cb=None) + req_writable._closed = True + return { + "res_readable": res_readable, + "req_writable": req_writable, + } + + session_id = session.id + try: + send_fn = transport.get_session_bound_send_fn(to, session_id) + except RuntimeError: + # Session already dead + res_readable = Readable() + res_readable._push_value( + err_result( + UNEXPECTED_DISCONNECT_CODE, + f"{to} unexpectedly disconnected", + ) + ) + res_readable._trigger_close() + req_writable = Writable(write_cb=lambda _: None, close_cb=None) + req_writable._closed = True + return { + "res_readable": res_readable, + "req_writable": req_writable, + } + + # Determine flags + proc_closes_with_init = proc_type in ("rpc", "subscription") + stream_id = generate_id() + + # Create readable for responses + res_readable: Readable = Readable() + + # Tracking state + clean_close = True + cleaned_up = False + abort_task: asyncio.Task | None = None + + def cleanup(): + nonlocal cleaned_up + if cleaned_up: + return + cleaned_up = True + transport.remove_event_listener("message", on_message) + transport.remove_event_listener("sessionStatus", on_session_status) + if abort_task is not None and not abort_task.done(): + abort_task.cancel() + + def _try_cleanup(): + """Run cleanup once both sides have been closed/triggered.""" + if res_readable._closed and req_writable.is_closed(): + cleanup() + + def close_readable(): + if not res_readable._closed: + try: + res_readable._trigger_close() + except RuntimeError: + pass + 
_try_cleanup() + + # Create writable for requests + def write_cb(raw_value: Any) -> None: + nonlocal clean_close + try: + send_fn( + PartialTransportMessage( + payload=raw_value, + stream_id=stream_id, + control_flags=0, + ) + ) + except RuntimeError: + # Session is gone — push disconnect error and tear down + clean_close = False + try: + res_readable._push_value( + err_result( + UNEXPECTED_DISCONNECT_CODE, + "send failed: session closed", + ) + ) + except RuntimeError: + pass + close_readable() + if req_writable.is_writable(): + req_writable.close() + + def close_cb() -> None: + nonlocal clean_close + if not proc_closes_with_init and clean_close: + try: + send_fn(close_stream_message(stream_id)) + except RuntimeError: + pass + _try_cleanup() + + req_writable: Writable = Writable(write_cb=write_cb, close_cb=close_cb) + + def on_message(msg: TransportMessage) -> None: + nonlocal clean_close + if msg.stream_id != stream_id: + return + if msg.to != transport.client_id: + return + + # Cancel from server — always an error + if is_stream_cancel(msg.control_flags): + clean_close = False + payload = msg.payload + if isinstance(payload, dict) and "ok" in payload and not payload["ok"]: + # Already error-shaped, forward as-is + res_readable._push_value(payload) + else: + # Force to error shape (reject ok:true on cancel) + code = ( + payload.get("code", "UNKNOWN") + if isinstance(payload, dict) + else "UNKNOWN" + ) + message = ( + payload.get("message", str(payload)) + if isinstance(payload, dict) + else str(payload) + ) + res_readable._push_value(err_result(code, message)) + close_readable() + if req_writable.is_writable(): + req_writable.close() + return + + if res_readable.is_closed(): + return + + # Normal payload (not a CLOSE control) + if isinstance(msg.payload, dict): + if msg.payload.get("type") != "CLOSE": + if "ok" in msg.payload: + res_readable._push_value(msg.payload) + + # Stream close + if is_stream_close(msg.control_flags): + close_readable() + + def 
on_session_status(evt: dict) -> None: + nonlocal clean_close + if evt.get("status") != "closing": + return + event_session = evt.get("session") + if event_session is None: + return + if event_session.to_id != to or event_session.id != session_id: + return + + clean_close = False + try: + res_readable._push_value( + err_result( + UNEXPECTED_DISCONNECT_CODE, + f"{to} unexpectedly disconnected", + ) + ) + except RuntimeError: + pass + close_readable() + if req_writable.is_writable(): + req_writable.close() + + def on_client_cancel() -> None: + nonlocal clean_close + if cleaned_up: + return + clean_close = False + try: + res_readable._push_value(err_result(CANCEL_CODE, "cancelled by client")) + except RuntimeError: + pass + close_readable() + if req_writable.is_writable(): + req_writable.close() + try: + send_fn( + cancel_message( + stream_id, + err_result(CANCEL_CODE, "cancelled by client"), + ) + ) + except RuntimeError: + pass + + # Register listeners + transport.add_event_listener("message", on_message) + transport.add_event_listener("sessionStatus", on_session_status) + + # Wire up abort signal + if abort_signal is not None: + + async def _watch_abort(): + await abort_signal.wait() + on_client_cancel() + + try: + loop = asyncio.get_running_loop() + abort_task = loop.create_task(_watch_abort()) + except RuntimeError: + pass + + # Send init message + init_flags = ( + ControlFlags.StreamOpenBit | ControlFlags.StreamClosedBit + if proc_closes_with_init + else ControlFlags.StreamOpenBit + ) + + try: + send_fn( + PartialTransportMessage( + payload=init, + stream_id=stream_id, + control_flags=init_flags, + service_name=service_name, + procedure_name=procedure_name, + ) + ) + except RuntimeError: + # Session dead at send time + try: + res_readable._push_value( + err_result( + UNEXPECTED_DISCONNECT_CODE, + f"{to} unexpectedly disconnected", + ) + ) + res_readable._trigger_close() + except RuntimeError: + pass + req_writable._closed = True + cleanup() + return { + 
"res_readable": res_readable, + "req_writable": req_writable, + } + + # For rpc/subscription, close request side immediately + if proc_closes_with_init: + req_writable._closed = True + + return { + "res_readable": res_readable, + "req_writable": req_writable, + } diff --git a/python-client/river/codec.py b/python-client/river/codec.py new file mode 100644 index 00000000..16c914d1 --- /dev/null +++ b/python-client/river/codec.py @@ -0,0 +1,107 @@ +"""Codec layer for encoding/decoding transport messages.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Any + +from river.types import TransportMessage + + +class Codec(ABC): + """Abstract codec for encoding/decoding objects to/from bytes.""" + + @abstractmethod + def to_buffer(self, obj: dict[str, Any]) -> bytes: + """Encode an object to bytes.""" + ... + + @abstractmethod + def from_buffer(self, buf: bytes) -> dict[str, Any]: + """Decode bytes to an object.""" + ... + + +_BIGINT_EXT_TYPE = 0 +# Use JS Number.MAX_SAFE_INTEGER bounds, not msgpack's 64-bit range. +# Values outside this range lose precision when decoded as JS numbers. 
+_MAX_SAFE_INTEGER = 2**53 - 1 +_MIN_SAFE_INTEGER = -(2**53 - 1) + + +class BinaryCodec(Codec): + """Codec using msgpack serialization (matches TypeScript BinaryCodec).""" + + name = "binary" + + def to_buffer(self, obj: dict[str, Any]) -> bytes: + import msgpack + + return msgpack.packb(self._prepare(obj), use_bin_type=True) + + def from_buffer(self, buf: bytes) -> dict[str, Any]: + import msgpack + + return msgpack.unpackb(buf, raw=False, ext_hook=self._ext_decode) + + @staticmethod + def _prepare(obj: Any) -> Any: + """Walk *obj* and replace ints outside JS safe range with ExtType.""" + import msgpack + + if isinstance(obj, dict): + return {k: BinaryCodec._prepare(v) for k, v in obj.items()} + if isinstance(obj, (list, tuple)): + return [BinaryCodec._prepare(v) for v in obj] + if isinstance(obj, int) and not isinstance(obj, bool) and ( + obj > _MAX_SAFE_INTEGER or obj < _MIN_SAFE_INTEGER + ): + data = msgpack.packb(str(obj), use_bin_type=True) + return msgpack.ExtType(_BIGINT_EXT_TYPE, data) + return obj + + @staticmethod + def _ext_decode(code: int, data: bytes) -> Any: + import msgpack + + if code == _BIGINT_EXT_TYPE: + val = msgpack.unpackb(data, raw=False) + return int(val) + return msgpack.ExtType(code, data) + + +class CodecMessageAdapter: + """Wraps a Codec with error handling and validation for TransportMessage.""" + + def __init__(self, codec: Codec) -> None: + self._codec = codec + + def to_buffer(self, msg: TransportMessage) -> tuple[bool, bytes | str]: + """Serialize a TransportMessage to bytes. + + Returns (True, bytes) on success, (False, error_reason) on failure. + """ + try: + raw = msg.to_dict() + buf = self._codec.to_buffer(raw) + return True, buf + except Exception as e: + return False, f"Failed to serialize message: {e}" + + def from_buffer(self, buf: bytes) -> tuple[bool, TransportMessage | str]: + """Deserialize bytes to a TransportMessage. + + Returns (True, TransportMessage) on success, (False, error_reason) on failure. 
+ Validation of required fields and types is handled by + :meth:`TransportMessage.from_dict`. + """ + try: + raw = self._codec.from_buffer(buf) + if not isinstance(raw, dict): + return False, f"Expected dict, got {type(raw).__name__}" + msg = TransportMessage.from_dict(raw) + return True, msg + except (KeyError, TypeError) as e: + return False, str(e) + except Exception as e: + return False, f"Failed to deserialize message: {e}" diff --git a/python-client/river/codegen/__init__.py b/python-client/river/codegen/__init__.py new file mode 100644 index 00000000..d7c4e680 --- /dev/null +++ b/python-client/river/codegen/__init__.py @@ -0,0 +1,10 @@ +"""River protocol codegen — generates typed Python clients from JSON Schema.""" + +from river.codegen.emitter import write_generated_files +from river.codegen.schema import SchemaConverter, SchemaIR + +__all__ = [ + "SchemaConverter", + "SchemaIR", + "write_generated_files", +] diff --git a/python-client/river/codegen/__main__.py b/python-client/river/codegen/__main__.py new file mode 100644 index 00000000..78727f7d --- /dev/null +++ b/python-client/river/codegen/__main__.py @@ -0,0 +1,64 @@ +"""CLI entry point: python -m river.codegen + +Usage: + python -m river.codegen --schema schema.json --output generated/ +""" + +from __future__ import annotations + +import argparse +import json + +from river.codegen.emitter import write_generated_files +from river.codegen.schema import SchemaConverter + + +def main(argv: list[str] | None = None) -> None: + parser = argparse.ArgumentParser( + prog="river.codegen", + description="Generate typed Python clients from a River JSON schema.", + ) + parser.add_argument( + "--schema", + "-s", + required=True, + help="Path to the serialized schema JSON file.", + ) + parser.add_argument( + "--output", + "-o", + required=True, + help="Output directory for generated files.", + ) + parser.add_argument( + "--package", + default=None, + help="Absolute import prefix instead of relative imports.", + ) + 
parser.add_argument( + "--client-name", + default=None, + help="Generate a root client class with this name " + "that aggregates all services.", + ) + + args = parser.parse_args(argv) + + with open(args.schema) as f: + raw_schema = json.load(f) + + converter = SchemaConverter() + ir = converter.convert(raw_schema) + + written = write_generated_files( + ir, args.output, package=args.package, client_name=args.client_name + ) + + for path in written: + print(f" wrote {path}") + + print(f"Generated {len(written)} files in {args.output}") + + +if __name__ == "__main__": + main() diff --git a/python-client/river/codegen/emitter.py b/python-client/river/codegen/emitter.py new file mode 100644 index 00000000..1a4c5454 --- /dev/null +++ b/python-client/river/codegen/emitter.py @@ -0,0 +1,283 @@ +"""IR → Python source file emitter. + +Renders Jinja2 templates from the ``templates/`` directory against +a :class:`SchemaIR` to produce the generated output package. +""" + +from __future__ import annotations + +import os +from pathlib import Path + +import jinja2 + +from river.codegen.schema import ( + SchemaIR, + ServiceDef, + _sanitize_identifier, + _to_pascal_case, +) + +_TEMPLATE_DIR = Path(__file__).parent / "templates" + +_env = jinja2.Environment( + loader=jinja2.FileSystemLoader(str(_TEMPLATE_DIR)), + keep_trailing_newline=True, + lstrip_blocks=True, + trim_blocks=True, +) +_env.filters["pascal"] = _to_pascal_case + + +def _escape_docstring(s: str) -> str: + """Escape a string for use inside triple-quoted docstrings.""" + s = s.replace("\\", "\\\\").replace('"""', r"\"\"\"") + # A trailing " would merge with the closing """ to form """", breaking syntax. 
_env.filters["docstring"] = _escape_docstring


def _result_type(proc) -> str:  # noqa: ANN001
    """Build the typed result annotation for a procedure.

    The annotation is ``OkResult[<output>] | ErrResult[<errors>]``.
    Protocol-level errors can occur on any procedure, so ``ProtocolError``
    is always part of the error arm; service-specific errors are unioned
    in when the procedure declares them.
    """
    ok = f"OkResult[{proc.output_type.annotation}]"
    if proc.error_type:
        err = f"ErrResult[{proc.error_type.annotation} | ProtocolError]"
    else:
        err = "ErrResult[ProtocolError]"
    return f"{ok} | {err}"


_env.filters["result_type"] = _result_type


# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------


def _field_annotation(f) -> str:  # noqa: ANN001
    """Return the full annotation for a TypedDict field.

    Optional (non-required) fields are wrapped in ``NotRequired[...]``.
    """
    ann = f.type_ref.annotation
    if not f.required:
        return f"NotRequired[{ann}]"
    return ann


def _collect_used_type_names(svc: ServiceDef, ir: SchemaIR) -> list[str]:
    """Collect TypedDict names actually referenced in method signatures.

    Only names that appear in some procedure's init/input/output/error
    annotation are imported by the generated service module; everything
    else stays out of the import list to keep generated files lint-clean.
    """
    td_names = {td.name for td in ir.typedicts}
    names: set[str] = set()

    for proc in svc.procedures:
        _extract_names(proc.init_type.annotation, td_names, names)
        if proc.input_type:
            _extract_names(proc.input_type.annotation, td_names, names)
        _extract_names(proc.output_type.annotation, td_names, names)
        if proc.error_type:
            _extract_names(proc.error_type.annotation, td_names, names)

    return sorted(names)


def _extract_names(annotation: str, known: set[str], out: set[str]) -> None:
    """Add every identifier in *annotation* that is in *known* to *out*.

    Annotations are plain strings (e.g. ``"list[FooOutput] | None"``), so a
    simple identifier scan is sufficient — no need to parse the expression.
    """
    import re

    for name in re.findall(r"[A-Za-z_]\w*", annotation):
        if name in known:
            out.add(name)


# ---------------------------------------------------------------------------
# Rendering
# ---------------------------------------------------------------------------


def _prepare_typedicts(ir: SchemaIR) -> list[dict]:
    """Prepare TypedDict data for the types template."""
    result = []
    for td in ir.typedicts:
        fields = []
        for f in td.fields:
            fields.append(
                {
                    "name": f.name,
                    "annotation": _field_annotation(f),
                    "description": f.description,
                }
            )
        result.append(
            {"name": td.name, "description": td.description, "fields": fields}
        )
    return result


def render_errors() -> str:
    """Render the static protocol-error module (no schema input needed)."""
    return _env.get_template("errors.py.j2").render()


def render_types(ir: SchemaIR) -> str:
    """Render the shared ``_types.py`` module for all TypedDicts in *ir*."""
    typedicts = _prepare_typedicts(ir)

    # Append handshake TypedDict if present.  It lives outside ir.typedicts
    # (convert() pops it off) so it must be added to the render list here.
    if ir.handshake_type:
        hs_fields = []
        for f in ir.handshake_type.fields:
            hs_fields.append(
                {
                    "name": f.name,
                    "annotation": _field_annotation(f),
                    # Keep the same dict shape as _prepare_typedicts so
                    # handshake field descriptions are rendered too.
                    "description": f.description,
                }
            )
        typedicts.append(
            {
                "name": ir.handshake_type.name,
                "description": ir.handshake_type.description,
                "fields": hs_fields,
            }
        )

    needs_literal = any(
        "Literal[" in f["annotation"] for td in typedicts for f in td["fields"]
    )
    has_not_required = any(
        "NotRequired[" in f["annotation"] for td in typedicts for f in td["fields"]
    )

    # Only import from typing_extensions what the generated file uses.
    typing_ext = ["TypedDict"]
    if has_not_required:
        typing_ext.append("NotRequired")

    return _env.get_template("types.py.j2").render(
        typedicts=typedicts,
        needs_literal=needs_literal,
        typing_ext_imports=sorted(typing_ext),
    )


def render_service_client(svc: ServiceDef, ir: SchemaIR, import_prefix: str) -> str:
    """Render a typed client module for one service."""
    type_names = _collect_used_type_names(svc, ir)
    types_module = "._types" if import_prefix == "." else f"{import_prefix}_types"

    # Which wrapper result types the template must import.
    proc_types = {p.proc_type for p in svc.procedures}
    has_rpc = "rpc" in proc_types
    has_stream = "stream" in proc_types
    has_upload = "upload" in proc_types
    has_subscription = "subscription" in proc_types

    # Check if any annotation references Literal (e.g. const schemas)
    all_annotations = []
    for p in svc.procedures:
        all_annotations.append(p.init_type.annotation)
        all_annotations.append(p.output_type.annotation)
        if p.input_type:
            all_annotations.append(p.input_type.annotation)
        if p.error_type:
            all_annotations.append(p.error_type.annotation)
    needs_literal = any("Literal[" in a for a in all_annotations)

    return _env.get_template("service_client.py.j2").render(
        service=svc,
        type_names=type_names,
        types_module=types_module,
        has_rpc=has_rpc,
        has_stream=has_stream,
        has_upload=has_upload,
        has_subscription=has_subscription,
        needs_literal=needs_literal,
    )


def _module_name(service_name: str) -> str:
    """Sanitize a service name for use as a Python module name."""
    return _sanitize_identifier(service_name)


def render_root_client(ir: SchemaIR, client_name: str, import_prefix: str) -> str:
    """Render the aggregated root client that exposes one attribute per service."""
    imports = []
    services = []
    for svc in ir.services:
        mod_name = _module_name(svc.name)
        cls = f"{svc.class_name}Client"
        if import_prefix == ".":
            mod = f".{mod_name}_client"
        else:
            mod = f"{import_prefix}{mod_name}_client"
        imports.append((mod, cls))
        services.append((_sanitize_identifier(svc.name), cls))

    # Stable ordering keeps regenerated output diff-friendly.
    imports.sort(key=lambda x: x[0])
    services.sort(key=lambda x: x[0])

    return _env.get_template("root_client.py.j2").render(
        client_name=client_name,
        imports=imports,
        services=services,
    )


def render_init(
    ir: SchemaIR, import_prefix: str, client_name: str | None = None
) -> str:
    """Render ``__init__.py`` re-exporting all generated client classes."""
    imports = []
    for svc in ir.services:
        mod_name = _module_name(svc.name)
        if import_prefix == ".":
            mod = f".{mod_name}_client"
        else:
            mod = f"{import_prefix}{mod_name}_client"
        imports.append((mod, f"{svc.class_name}Client"))

    if client_name:
        if import_prefix == ".":
            mod = "._root_client"
        else:
            mod = f"{import_prefix}_root_client"
        imports.append((mod, client_name))

    if ir.handshake_type:
        types_mod = "._types" if import_prefix == "." else f"{import_prefix}_types"
        imports.append((types_mod, ir.handshake_type.name))

    imports.sort(key=lambda x: x[0])

    return _env.get_template("init.py.j2").render(imports=imports)


# ---------------------------------------------------------------------------
# Top-level write function
# ---------------------------------------------------------------------------


def write_generated_files(
    ir: SchemaIR,
    output_dir: str,
    package: str | None = None,
    client_name: str | None = None,
) -> list[str]:
    """Write all generated files to *output_dir*.

    Creates *output_dir* if needed.  Emits ``_errors.py``, ``_types.py``,
    one ``<service>_client.py`` per service, optionally ``_root_client.py``
    (when *client_name* is given), and ``__init__.py``.

    Returns the list of written file paths.
    """
    os.makedirs(output_dir, exist_ok=True)
    import_prefix = f"{package}." if package else "."
    written: list[str] = []

    def _write(name: str, content: str) -> None:
        p = Path(output_dir) / name
        # Always write UTF-8 explicitly: Path.write_text() defaults to the
        # platform locale encoding, which breaks generated sources containing
        # non-ASCII characters (e.g. in docstrings) on Windows.
        p.write_text(content, encoding="utf-8")
        written.append(str(p))

    _write("_errors.py", render_errors())
    _write("_types.py", render_types(ir))

    for svc in ir.services:
        _write(
            f"{_module_name(svc.name)}_client.py",
            render_service_client(svc, ir, import_prefix),
        )

    if client_name:
        _write("_root_client.py", render_root_client(ir, client_name, import_prefix))

    _write("__init__.py", render_init(ir, import_prefix, client_name=client_name))

    return written
+""" + +from __future__ import annotations + +import keyword +import re +from dataclasses import dataclass, field + +# --------------------------------------------------------------------------- +# IR types +# --------------------------------------------------------------------------- + + +@dataclass +class TypeRef: + """A reference to a Python type, either inline or named.""" + + annotation: str # e.g. "str", "int", "list[float]", "TestAddInit" + + +@dataclass +class TypedDictField: + name: str + type_ref: TypeRef + required: bool = True + description: str | None = None + + +@dataclass +class TypedDictDef: + """A TypedDict class to be emitted.""" + + name: str + fields: list[TypedDictField] = field(default_factory=list) + description: str | None = None + + +@dataclass +class ProcedureDef: + """Describes a single procedure in a service.""" + + name: str # camelCase wire name + py_name: str # snake_case Python method name + proc_type: str # "rpc" | "stream" | "upload" | "subscription" + init_type: TypeRef # type annotation for init param + input_type: TypeRef | None # only for stream/upload + output_type: TypeRef # ok payload type + error_type: TypeRef | None # service-specific errors + description: str | None = None + + +@dataclass +class ServiceDef: + """Describes a single service.""" + + name: str # wire name + class_name: str # PascalCase Python class name + procedures: list[ProcedureDef] = field(default_factory=list) + + +@dataclass +class SchemaIR: + """Complete intermediate representation for the whole server schema.""" + + services: list[ServiceDef] = field(default_factory=list) + typedicts: list[TypedDictDef] = field(default_factory=list) + handshake_type: TypedDictDef | None = None + + +# --------------------------------------------------------------------------- +# Protocol error codes (always present in the errors union) +# --------------------------------------------------------------------------- + +PROTOCOL_ERROR_CODES = frozenset( + 
{"UNCAUGHT_ERROR", "UNEXPECTED_DISCONNECT", "INVALID_REQUEST", "CANCEL"} +) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _sanitize_identifier(s: str) -> str: + """Replace characters illegal in Python identifiers with underscores.""" + # Replace dashes, spaces, and other non-alnum/non-underscore chars + s = re.sub(r"[^a-zA-Z0-9_]", "_", s) + # Strip leading digits so the result is a valid identifier + s = re.sub(r"^[0-9]+", "", s) + return s or "unnamed" + + +def _to_pascal_case(s: str) -> str: + """Convert a camelCase, snake_case, or space-separated string to PascalCase.""" + s = _sanitize_identifier(s) + # Handle snake_case or space-separated + if "_" in s: + words = re.split(r"_+", s) + return "".join(word.capitalize() for word in words if word) + # camelCase → PascalCase: just capitalize first letter + if s: + return s[0].upper() + s[1:] + return s + + +def _to_snake_case(s: str) -> str: + """Convert camelCase to snake_case.""" + s = _sanitize_identifier(s) + result = re.sub(r"([A-Z])", r"_\1", s).lower() + result = result.lstrip("_") + if keyword.iskeyword(result): + result += "_" + return result + + +def _safe_field_name(name: str) -> str: + """Normalize a property name into a valid Python identifier. + + Strips characters illegal in identifiers (e.g. ``$kind`` → ``kind``) + and appends ``_`` to Python keywords. + """ + sanitized = _sanitize_identifier(name) + if keyword.iskeyword(sanitized): + sanitized += "_" + # Names starting with __ (and not ending with __) are name-mangled + # inside class bodies — prefix with underscore to avoid that. 
class SchemaConverter:
    """Converts a serialized River server schema into SchemaIR.

    A converter instance is reusable: :meth:`convert` resets all internal
    state before each run.  TypedDicts are emitted into ``self._typedicts``
    in dependency order (nested types before the types that reference them).
    """

    def __init__(self) -> None:
        self._typedicts: list[TypedDictDef] = []
        # $id → assigned Python name (for recursive $ref resolution)
        self._id_to_name: dict[str, str] = {}
        # Track emitted TypedDict names to detect collisions
        self._td_names: set[str] = set()

    def convert(self, raw: dict) -> SchemaIR:
        """Convert the top-level serialized schema dict to IR.

        Raises ValueError when two wire names collide after sanitization
        (module names, class names, or procedure method names).
        """
        self._typedicts = []
        self._id_to_name = {}
        self._td_names = set()
        services: list[ServiceDef] = []
        seen_modules: dict[str, str] = {}  # sanitized name → wire name
        seen_classes: dict[str, str] = {}  # class name → wire name
        for svc_name, svc_data in raw.get("services", {}).items():
            module_name = _sanitize_identifier(svc_name)
            if module_name in seen_modules:
                raise ValueError(
                    f"services {seen_modules[module_name]!r} and "
                    f"{svc_name!r} both map to Python module "
                    f"{module_name!r}_client.py"
                )
            seen_modules[module_name] = svc_name

            class_name = _to_pascal_case(svc_name) + "Client"
            if class_name in seen_classes:
                raise ValueError(
                    f"services {seen_classes[class_name]!r} and "
                    f"{svc_name!r} both map to Python class "
                    f"{class_name!r}"
                )
            seen_classes[class_name] = svc_name

            svc_def = self._convert_service(svc_name, svc_data)
            services.append(svc_def)

        # Parse optional handshake schema
        handshake_type: TypedDictDef | None = None
        hs_schema = raw.get("handshakeSchema")
        if hs_schema and isinstance(hs_schema, dict):
            hs_ref = self._schema_to_typeref(hs_schema, "HandshakeSchema")
            # Only object schemas emit a TypedDict.  Guard on the name so a
            # handshake schema that does NOT produce one (primitive, union,
            # pattern-properties dict, $ref) cannot make us pop an unrelated
            # service TypedDict off the list.
            if self._typedicts and self._typedicts[-1].name == hs_ref.annotation:
                handshake_type = self._typedicts.pop()

        return SchemaIR(
            services=services,
            typedicts=list(self._typedicts),
            handshake_type=handshake_type,
        )

    def _convert_service(self, name: str, data: dict) -> ServiceDef:
        """Convert one service entry, checking for method-name collisions."""
        class_name = _to_pascal_case(name)
        procedures: list[ProcedureDef] = []
        seen_py_names: dict[str, str] = {}  # py_name → wire name
        for proc_name, proc_data in data.get("procedures", {}).items():
            proc_def = self._convert_procedure(class_name, proc_name, proc_data)
            if proc_def.py_name in seen_py_names:
                raise ValueError(
                    f"service {name!r}: procedures "
                    f"{seen_py_names[proc_def.py_name]!r} and "
                    f"{proc_name!r} both map to Python method "
                    f"{proc_def.py_name!r}"
                )
            seen_py_names[proc_def.py_name] = proc_name
            procedures.append(proc_def)
        return ServiceDef(
            name=name,
            class_name=class_name,
            procedures=procedures,
        )

    def _convert_procedure(self, svc_class: str, name: str, data: dict) -> ProcedureDef:
        """Convert one procedure entry into a ProcedureDef."""
        proc_type = data["type"]
        prefix = svc_class + _to_pascal_case(name)

        # Init type and streaming input type.
        # Two schema formats:
        #   - v2 (serializeSchema): all procedures have "init";
        #     stream/upload also have "input"
        #   - v1 (pid2 etc.): rpc/subscription use "input" as init;
        #     stream/upload have "init" + "input"
        input_type = None
        if "init" in data:
            init_type = self._schema_to_typeref(data["init"], f"{prefix}Init")
            if "input" in data:
                input_type = self._schema_to_typeref(data["input"], f"{prefix}Input")
        else:
            init_type = self._schema_to_typeref(data["input"], f"{prefix}Init")

        # Output type
        output_type = self._schema_to_typeref(data["output"], f"{prefix}Output")

        # Error type — separate protocol errors from service errors
        error_type = self._extract_service_errors(data.get("errors"), prefix)

        description = data.get("description")

        return ProcedureDef(
            name=name,
            py_name=_to_snake_case(name),
            proc_type=proc_type,
            init_type=init_type,
            input_type=input_type,
            output_type=output_type,
            error_type=error_type,
            description=description,
        )

    def _extract_service_errors(
        self, errors_schema: dict | None, prefix: str
    ) -> TypeRef | None:
        """Extract non-protocol errors from the errors union.

        Protocol error variants (matched by their const ``code``) are
        dropped — they are covered by the shared ``ProtocolError`` union.
        Returns None when no service-specific variants remain.
        """
        if errors_schema is None:
            return None

        variants = errors_schema.get("anyOf", [])
        service_variants = []
        for v in variants:
            code_schema = v.get("properties", {}).get("code", {})
            code_const = code_schema.get("const")
            if code_const and code_const in PROTOCOL_ERROR_CODES:
                continue
            service_variants.append(v)

        if not service_variants:
            return None

        if len(service_variants) == 1:
            return self._schema_to_typeref(service_variants[0], f"{prefix}Error")

        # Multiple service error variants → union, each named after its
        # const code when one exists, otherwise by positional index.
        refs: list[TypeRef] = []
        for i, v in enumerate(service_variants):
            code_schema = v.get("properties", {}).get("code", {})
            code_const = code_schema.get("const")
            if code_const:
                suffix = _to_pascal_case(code_const.lower().replace("_", " "))
                td_name = f"{prefix}Error{suffix}"
            else:
                td_name = f"{prefix}Error{i}"
            refs.append(self._schema_to_typeref(v, td_name))

        parts = " | ".join(r.annotation for r in refs)
        return TypeRef(annotation=parts)

    def _schema_to_typeref(self, schema: dict, name_hint: str) -> TypeRef:
        """Convert a JSON Schema node to a TypeRef, potentially creating TypedDicts."""
        if not isinstance(schema, dict):
            return TypeRef(annotation="Any")

        # $ref → forward reference to a previously-registered $id
        if "$ref" in schema:
            ref_id = schema["$ref"]
            if ref_id in self._id_to_name:
                return TypeRef(annotation=self._id_to_name[ref_id])
            return TypeRef(annotation="Never")

        # $id → register the name before converting (enables recursive refs)
        schema_id = schema.get("$id")
        if schema_id is not None:
            self._id_to_name[schema_id] = name_hint

        # const
        if "const" in schema:
            val = schema["const"]
            if isinstance(val, str):
                # Use repr to handle all escaping (quotes, backslashes,
                # control chars) then unwrap the outer quotes and re-wrap
                # with double quotes for Literal["..."] syntax.
                escaped = repr(val)[1:-1].replace('"', '\\"')
                return TypeRef(annotation=f'Literal["{escaped}"]')
            return TypeRef(annotation=f"Literal[{val!r}]")

        # anyOf (union)
        if "anyOf" in schema:
            return self._convert_union(schema, name_hint)

        # allOf (intersection) — merge object properties.  Handled here for
        # every node shape, including objects that also carry "type".
        if "allOf" in schema:
            return self._convert_intersection(schema, name_hint)

        schema_type = schema.get("type")

        # Primitive types
        if schema_type == "string":
            return TypeRef(annotation="str")
        if schema_type == "number":
            return TypeRef(annotation="float")
        if schema_type == "integer":
            return TypeRef(annotation="int")
        if schema_type == "boolean":
            return TypeRef(annotation="bool")
        if schema_type == "null":
            return TypeRef(annotation="None")
        if schema_type == "Uint8Array":
            return TypeRef(annotation="bytes")

        # Array
        if schema_type == "array":
            items = schema.get("items", {})
            item_ref = self._schema_to_typeref(items, f"{name_hint}Item")
            return TypeRef(annotation=f"list[{item_ref.annotation}]")

        # Object → TypedDict.  (An "allOf" on an object node was already
        # routed to _convert_intersection above, so no re-check is needed.)
        if schema_type == "object":
            return self._convert_object(schema, name_hint)

        # Fallback
        return TypeRef(annotation="Any")

    def _emit_typedict(self, td: TypedDictDef) -> None:
        """Register a TypedDict, skipping if the same name was already emitted."""
        if td.name in self._td_names:
            return
        self._td_names.add(td.name)
        self._typedicts.append(td)

    def _convert_object(self, schema: dict, name: str) -> TypeRef:
        """Convert a JSON Schema object to a TypedDict and return a ref to it."""
        # patternProperties with a catch-all pattern → dict[str, ValueType]
        pattern_props = schema.get("patternProperties", {})
        if pattern_props and not schema.get("properties"):
            values = list(pattern_props.values())
            if len(values) == 1:
                value_ref = self._schema_to_typeref(values[0], f"{name}Value")
                val_ann = value_ref.annotation
            else:
                value_refs = [
                    self._schema_to_typeref(v, f"{name}Value{i}")
                    for i, v in enumerate(values)
                ]
                # De-duplicate while preserving order before building a union.
                unique = list(dict.fromkeys(r.annotation for r in value_refs))
                val_ann = unique[0] if len(unique) == 1 else " | ".join(unique)
            return TypeRef(annotation=f"dict[str, {val_ann}]")

        properties = schema.get("properties", {})
        required_set = set(schema.get("required", []))
        description = schema.get("description")

        fields: list[TypedDictField] = []
        seen_field_names: dict[str, str] = {}  # normalized → original
        for prop_name, prop_schema in properties.items():
            field_name = _safe_field_name(prop_name)
            if field_name in seen_field_names:
                raise ValueError(
                    f"TypedDict {name!r}: properties "
                    f"{seen_field_names[field_name]!r} and {prop_name!r} "
                    f"both normalize to field {field_name!r}"
                )
            seen_field_names[field_name] = prop_name
            nested_name = name + _to_pascal_case(prop_name)
            field_ref = self._schema_to_typeref(prop_schema, nested_name)
            field_desc = (
                prop_schema.get("description")
                if isinstance(prop_schema, dict)
                else None
            )
            fields.append(
                TypedDictField(
                    name=field_name,
                    type_ref=field_ref,
                    required=prop_name in required_set,
                    description=field_desc,
                )
            )

        td = TypedDictDef(name=name, fields=fields, description=description)
        self._emit_typedict(td)
        return TypeRef(annotation=name)

    def _convert_intersection(self, schema: dict, name_hint: str) -> TypeRef:
        """Convert a JSON Schema allOf to a merged TypedDict.

        Object variants have their properties merged into a single
        TypedDict. A field is required if it appears in the ``required``
        list of *any* variant (intersection semantics). Non-object
        variants and empty allOf produce ``Never`` since they represent
        unrepresentable or contradictory intersections.
        """
        variants = schema.get("allOf", [])
        if not variants:
            return TypeRef(annotation="Never")

        # Partition into object-like variants and other variants
        object_variants: list[dict] = []
        other_variants: list[dict] = []
        for v in variants:
            if not isinstance(v, dict):
                continue
            v_type = v.get("type")
            if v_type == "object" or "properties" in v:
                object_variants.append(v)
            else:
                other_variants.append(v)

        # Mixed object + non-object is contradictory (object ∩ number = ∅)
        if object_variants and other_variants:
            return TypeRef(annotation="Never")

        # Pure object intersection — merge properties.  Later variants win
        # on conflicting property names (dict update semantics).
        if object_variants:
            merged_props: dict[str, dict] = {}
            merged_required: set[str] = set()
            for v in object_variants:
                for prop_name, prop_schema in v.get("properties", {}).items():
                    merged_props[prop_name] = prop_schema
                merged_required.update(v.get("required", []))

            description = schema.get("description")
            fields: list[TypedDictField] = []
            seen_field_names: dict[str, str] = {}
            for prop_name, prop_schema in merged_props.items():
                field_name = _safe_field_name(prop_name)
                if field_name in seen_field_names:
                    raise ValueError(
                        f"TypedDict {name_hint!r}: properties "
                        f"{seen_field_names[field_name]!r} and {prop_name!r} "
                        f"both normalize to field {field_name!r}"
                    )
                seen_field_names[field_name] = prop_name
                nested_name = name_hint + _to_pascal_case(prop_name)
                field_ref = self._schema_to_typeref(prop_schema, nested_name)
                field_desc = (
                    prop_schema.get("description")
                    if isinstance(prop_schema, dict)
                    else None
                )
                fields.append(
                    TypedDictField(
                        name=field_name,
                        type_ref=field_ref,
                        required=prop_name in merged_required,
                        description=field_desc,
                    )
                )
            td = TypedDictDef(name=name_hint, fields=fields, description=description)
            self._emit_typedict(td)
            return TypeRef(annotation=name_hint)

        # Only non-object variants — contradictory primitive intersection
        return TypeRef(annotation="Never")

    def _convert_union(self, schema: dict, name_hint: str) -> TypeRef:
        """Convert a JSON Schema anyOf to a Union type."""
        variants = schema.get("anyOf", [])
        if len(variants) == 0:
            return TypeRef(annotation="Never")
        if len(variants) == 1:
            return self._schema_to_typeref(variants[0], name_hint)

        refs: list[TypeRef] = []
        for i, v in enumerate(variants):
            # Try to derive a meaningful name from a const code or description
            variant_name = self._derive_variant_name(v, name_hint, i)
            refs.append(self._schema_to_typeref(v, variant_name))

        parts = " | ".join(r.annotation for r in refs)
        return TypeRef(annotation=parts)

    def _derive_variant_name(self, variant: dict, base_name: str, index: int) -> str:
        """Derive a name for a union variant."""
        # Check for a const code field
        props = variant.get("properties", {})
        code_schema = props.get("code", {})
        if isinstance(code_schema, dict) and "const" in code_schema:
            code_val = code_schema["const"]
            suffix = _to_pascal_case(
                code_val.lower().replace("_", " ").replace("-", " ")
            )
            return f"{base_name}{suffix}"

        # Check for description
        desc = variant.get("description")
        if desc:
            safe = re.sub(r"[^a-zA-Z0-9]", "", desc)
            if safe:
                return f"{base_name}{_to_pascal_case(safe)}"

        return f"{base_name}Variant{index}"
+""" + +from __future__ import annotations + +from typing import Literal + +from typing_extensions import NotRequired, TypedDict + + +class UncaughtError(TypedDict): + code: Literal["UNCAUGHT_ERROR"] + message: str + + +class UnexpectedDisconnect(TypedDict): + code: Literal["UNEXPECTED_DISCONNECT"] + message: str + + +class InvalidRequestExtrasItem(TypedDict): + path: str + message: str + + +class InvalidRequestExtras(TypedDict): + firstValidationErrors: list[InvalidRequestExtrasItem] + totalErrors: float + + +class InvalidRequest(TypedDict): + code: Literal["INVALID_REQUEST"] + message: str + extras: NotRequired[InvalidRequestExtras] + + +class Cancel(TypedDict): + code: Literal["CANCEL"] + message: str + + +ProtocolError = UncaughtError | UnexpectedDisconnect | InvalidRequest | Cancel diff --git a/python-client/river/codegen/templates/init.py.j2 b/python-client/river/codegen/templates/init.py.j2 new file mode 100644 index 00000000..2909e173 --- /dev/null +++ b/python-client/river/codegen/templates/init.py.j2 @@ -0,0 +1,11 @@ +"""Generated River service clients.""" + +{% for mod, cls in imports %} +from {{ mod }} import {{ cls }} +{% endfor %} + +__all__ = [ +{% for _, cls in imports %} + "{{ cls }}", +{% endfor %} +] diff --git a/python-client/river/codegen/templates/root_client.py.j2 b/python-client/river/codegen/templates/root_client.py.j2 new file mode 100644 index 00000000..6833d7c4 --- /dev/null +++ b/python-client/river/codegen/templates/root_client.py.j2 @@ -0,0 +1,18 @@ +"""Generated root client aggregating all service clients.""" + +from __future__ import annotations + +from river.client import RiverClient +{% for mod, cls in imports %} +from {{ mod }} import {{ cls }} +{% endfor %} + + +class {{ client_name }}: + """Aggregated client for all services.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client +{% for attr_name, cls in services %} + self.{{ attr_name }} = {{ cls }}(client) +{% endfor %} diff --git 
a/python-client/river/codegen/templates/service_client.py.j2 b/python-client/river/codegen/templates/service_client.py.j2 new file mode 100644 index 00000000..89959804 --- /dev/null +++ b/python-client/river/codegen/templates/service_client.py.j2 @@ -0,0 +1,104 @@ +"""Generated client for the {{ service.name }} service.""" + +from __future__ import annotations + +import asyncio +{% if needs_literal %} +from typing import Any, Literal +{% else %} +from typing import Any +{% endif %} + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) +{% if type_names %} + +from {{ types_module }} import ( +{% for name in type_names %} + {{ name }}, +{% endfor %} +) +{% endif %} + +from ._errors import ProtocolError + + +class {{ service.class_name }}Client: + """Typed client for the ``{{ service.name }}`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client +{% for proc in service.procedures %} + +{% if proc.proc_type == "rpc" %} + async def {{ proc.py_name }}( + self, + init: {{ proc.init_type.annotation }}, + *, + abort_signal: asyncio.Event | None = None, + ) -> {{ proc | result_type }}: +{% if proc.description %} + """{{ proc.description | docstring }}""" +{% endif %} + return await self._client.rpc( + "{{ service.name }}", + "{{ proc.name }}", + init, + abort_signal=abort_signal, + ) +{% elif proc.proc_type == "stream" %} + def {{ proc.py_name }}( + self, + init: {{ proc.init_type.annotation }}, + *, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[{{ proc.input_type.annotation }}, {{ proc | result_type }}]: +{% if proc.description %} + """{{ proc.description | docstring }}""" +{% endif %} + return self._client.stream( + "{{ service.name }}", + "{{ proc.name }}", + init, + abort_signal=abort_signal, + ) +{% elif proc.proc_type == "upload" %} + def {{ proc.py_name }}( + self, + init: {{ proc.init_type.annotation }}, + *, + abort_signal: asyncio.Event | 
None = None, + ) -> UploadResult[{{ proc.input_type.annotation }}, {{ proc | result_type }}]: +{% if proc.description %} + """{{ proc.description | docstring }}""" +{% endif %} + return self._client.upload( + "{{ service.name }}", + "{{ proc.name }}", + init, + abort_signal=abort_signal, + ) +{% elif proc.proc_type == "subscription" %} + def {{ proc.py_name }}( + self, + init: {{ proc.init_type.annotation }}, + *, + abort_signal: asyncio.Event | None = None, + ) -> SubscriptionResult[{{ proc | result_type }}]: +{% if proc.description %} + """{{ proc.description | docstring }}""" +{% endif %} + return self._client.subscribe( + "{{ service.name }}", + "{{ proc.name }}", + init, + abort_signal=abort_signal, + ) +{% endif %} +{% endfor %} diff --git a/python-client/river/codegen/templates/types.py.j2 b/python-client/river/codegen/templates/types.py.j2 new file mode 100644 index 00000000..0cc2eab9 --- /dev/null +++ b/python-client/river/codegen/templates/types.py.j2 @@ -0,0 +1,30 @@ +"""Generated type definitions for River services.""" + +from __future__ import annotations +{% if needs_literal %} + +from typing import Literal +{% endif %} + +from typing_extensions import {{ typing_ext_imports | join(", ") }} +{% for td in typedicts %} + + +class {{ td.name }}(TypedDict): +{% if td.description %} + """{{ td.description | docstring }}""" + +{% endif %} +{% if not td.fields %} + pass +{% else %} +{% for f in td.fields %} +{% if f.description %} + {{ f.name }}: {{ f.annotation }} + """{{ f.description | docstring }}""" +{% else %} + {{ f.name }}: {{ f.annotation }} +{% endif %} +{% endfor %} +{% endif %} +{% endfor %} diff --git a/python-client/river/session.py b/python-client/river/session.py new file mode 100644 index 00000000..dcdb5748 --- /dev/null +++ b/python-client/river/session.py @@ -0,0 +1,357 @@ +"""Session state machine for River protocol. + +Manages seq/ack bookkeeping, send buffers, and session lifecycle. 
+""" + +from __future__ import annotations + +import asyncio +import logging +import time +from dataclasses import dataclass +from enum import Enum +from typing import Any, Callable + +from river.codec import CodecMessageAdapter +from river.types import ( + PartialTransportMessage, + TransportMessage, + generate_id, + handshake_request_payload, + heartbeat_message, +) + +logger = logging.getLogger(__name__) + + +class SessionState(str, Enum): + """Session state machine states.""" + + NO_CONNECTION = "NoConnection" + BACKING_OFF = "BackingOff" + CONNECTING = "Connecting" + HANDSHAKING = "Handshaking" + CONNECTED = "Connected" + + +@dataclass(frozen=True) +class SessionOptions: + """Configuration options for a session.""" + + heartbeat_interval_ms: float = 1000 + heartbeats_until_dead: int = 2 + session_disconnect_grace_ms: float = 5000 + connection_timeout_ms: float = 2000 + handshake_timeout_ms: float = 1000 + enable_transparent_reconnects: bool = True + + +DEFAULT_SESSION_OPTIONS = SessionOptions() + + +class Session: + """Represents a River session with seq/ack bookkeeping and send buffer. + + A session persists across potentially multiple connections, tracking + all the state needed for transparent reconnection. 
+ """ + + def __init__( + self, + session_id: str, + from_id: str, + to_id: str, + codec: CodecMessageAdapter, + options: SessionOptions | None = None, + ) -> None: + self.id = session_id + self.from_id = from_id + self.to_id = to_id + self.codec = codec + self.options = options or DEFAULT_SESSION_OPTIONS + + # Seq/ack bookkeeping + self.seq: int = 0 # Next seq to assign when sending + self.ack: int = 0 # Next expected seq from the other side + self.send_buffer: list[TransportMessage] = [] + + # State machine + self.state: SessionState = SessionState.NO_CONNECTION + + # Connection + self._ws: Any = None # The WebSocket connection + self._is_actively_heartbeating: bool = False + + # Timers + self._heartbeat_task: asyncio.Task | None = None + self._heartbeat_miss_task: asyncio.Task | None = None + self._grace_period_task: asyncio.Task | None = None + self._grace_expiry_time: float | None = None + + # Callbacks + self._on_message: Callable[[TransportMessage], None] | None = None + self._on_connection_closed: Callable[[], None] | None = None + self._on_session_grace_elapsed: Callable[[], None] | None = None + + self._destroyed = False + + @property + def next_seq(self) -> int: + """The next seq the other side should see from us. + + Returns the seq of the first unacked message in the buffer, + or our current seq if the buffer is empty. + """ + if self.send_buffer: + return self.send_buffer[0].seq + return self.seq + + def construct_msg(self, partial: PartialTransportMessage) -> TransportMessage: + """Construct a full TransportMessage from a partial one. + + Fills in id, from, to, seq, ack and increments seq. 
+ """ + msg = TransportMessage( + id=generate_id(), + from_=self.from_id, + to=self.to_id, + seq=self.seq, + ack=self.ack, + payload=partial.payload, + stream_id=partial.stream_id, + control_flags=partial.control_flags, + service_name=partial.service_name, + procedure_name=partial.procedure_name, + tracing=partial.tracing, + ) + self.seq += 1 + return msg + + def send(self, partial: PartialTransportMessage) -> tuple[bool, str]: + """Construct and send a message. + + When connected, sends immediately over the wire and buffers. + When disconnected, only buffers. + + Returns (True, msg_id) on success, (False, reason) on failure. + """ + msg = self.construct_msg(partial) + self.send_buffer.append(msg) + + if self.state == SessionState.CONNECTED and self._ws is not None: + ok, result = self._send_over_wire(msg) + if not ok: + # Send failure is fatal — the caller (transport) + # is expected to destroy the session. + return False, result + return True, msg.id + + def _send_over_wire(self, msg: TransportMessage) -> tuple[bool, str]: + """Serialize and send a message over the current connection.""" + ok, buf_or_err = self.codec.to_buffer(msg) + if not ok: + assert isinstance(buf_or_err, str) + return False, buf_or_err + assert isinstance(buf_or_err, bytes) + try: + assert self._ws is not None + loop = asyncio.get_running_loop() + task = loop.create_task(self._ws.send(buf_or_err)) + task.add_done_callback(self._on_ws_send_done) + return True, msg.id + except Exception as e: + return False, f"Failed to send: {e}" + + def _on_ws_send_done(self, task: asyncio.Task) -> None: + """Handle completion of an async ws.send(). + + If the send failed, trigger the connection-closed callback so the + transport can reconnect and replay the send buffer — matching how + the TS side relies on synchronous send exceptions. 
+ """ + if task.cancelled(): + return + exc = task.exception() + if exc is not None: + logger.error("WebSocket send error: %s", exc) + if not self._destroyed and self._on_connection_closed: + self._on_connection_closed() + + def send_buffered_messages(self) -> tuple[bool, str | None]: + """Retransmit all buffered messages over the current connection. + + Called after a successful reconnection handshake. + """ + for msg in self.send_buffer: + ok, reason = self._send_over_wire(msg) + if not ok: + return False, reason + return True, None + + def update_bookkeeping(self, their_ack: int, their_seq: int) -> None: + """Update seq/ack bookkeeping based on an incoming message. + + - Removes acknowledged messages from the send buffer. + - Updates our ack to their_seq + 1. + - Resets the heartbeat miss timeout. + """ + # Remove acked messages from send buffer + self.send_buffer = [m for m in self.send_buffer if m.seq >= their_ack] + # Update our ack + self.ack = their_seq + 1 + # Reset heartbeat miss timer + self._reset_heartbeat_miss_timeout() + + def send_heartbeat(self) -> None: + """Send a heartbeat message.""" + self.send(heartbeat_message()) + + def start_active_heartbeat(self, loop: asyncio.AbstractEventLoop) -> None: + """Start sending heartbeats at the configured interval (server behavior).""" + self._is_actively_heartbeating = True + interval = self.options.heartbeat_interval_ms / 1000.0 + + async def _heartbeat_loop(): + try: + while not self._destroyed and self.state == SessionState.CONNECTED: + await asyncio.sleep(interval) + if not self._destroyed and self.state == SessionState.CONNECTED: + self.send_heartbeat() + except asyncio.CancelledError: + pass + + self._heartbeat_task = loop.create_task(_heartbeat_loop()) + + def start_heartbeat_miss_timeout(self, loop: asyncio.AbstractEventLoop) -> None: + """Start the missing heartbeat timeout.""" + miss_duration = ( + self.options.heartbeats_until_dead + * self.options.heartbeat_interval_ms + / 1000.0 + ) + + async 
def _miss_timeout(): + try: + await asyncio.sleep(miss_duration) + if not self._destroyed and self._on_connection_closed: + logger.debug( + "Session %s: heartbeat miss timeout, closing connection", + self.id, + ) + self._on_connection_closed() + except asyncio.CancelledError: + pass + + if self._heartbeat_miss_task: + self._heartbeat_miss_task.cancel() + self._heartbeat_miss_task = loop.create_task(_miss_timeout()) + + def _reset_heartbeat_miss_timeout(self) -> None: + """Reset the heartbeat miss timer.""" + if self._heartbeat_miss_task: + self._heartbeat_miss_task.cancel() + self._heartbeat_miss_task = None + try: + loop = asyncio.get_running_loop() + if loop.is_running(): + self.start_heartbeat_miss_timeout(loop) + except RuntimeError: + pass + + def start_grace_period(self, loop: asyncio.AbstractEventLoop) -> None: + """Start the session disconnect grace period. + + If the session is not reconnected within this time, it's destroyed. + """ + grace_ms = self.options.session_disconnect_grace_ms + self._grace_expiry_time = time.monotonic() + grace_ms / 1000.0 + + async def _grace_timeout(): + try: + await asyncio.sleep(grace_ms / 1000.0) + if not self._destroyed and self._on_session_grace_elapsed: + logger.debug( + "Session %s: grace period elapsed, destroying", self.id + ) + self._on_session_grace_elapsed() + except asyncio.CancelledError: + pass + + if self._grace_period_task: + self._grace_period_task.cancel() + self._grace_period_task = loop.create_task(_grace_timeout()) + + def cancel_grace_period(self) -> None: + """Cancel the session disconnect grace period.""" + if self._grace_period_task: + self._grace_period_task.cancel() + self._grace_period_task = None + self._grace_expiry_time = None + + def cancel_heartbeats(self) -> None: + """Cancel all heartbeat-related tasks.""" + if self._heartbeat_task: + self._heartbeat_task.cancel() + self._heartbeat_task = None + if self._heartbeat_miss_task: + self._heartbeat_miss_task.cancel() + self._heartbeat_miss_task = 
None + self._is_actively_heartbeating = False + + def set_connected(self, ws: Any, loop: asyncio.AbstractEventLoop) -> None: + """Transition to connected state.""" + self.state = SessionState.CONNECTED + self._ws = ws + self.cancel_grace_period() + self.start_heartbeat_miss_timeout(loop) + + def set_disconnected(self, loop: asyncio.AbstractEventLoop) -> None: + """Transition to disconnected state (no connection).""" + self.state = SessionState.NO_CONNECTION + self.cancel_heartbeats() + old_ws = self._ws + self._ws = None + if old_ws is not None: + try: + asyncio.ensure_future(old_ws.close()) + except Exception: + pass + self.start_grace_period(loop) + + def destroy(self) -> None: + """Destroy the session, cleaning up all resources.""" + self._destroyed = True + self.cancel_heartbeats() + self.cancel_grace_period() + if self._ws is not None: + try: + asyncio.ensure_future(self._ws.close()) + except Exception: + pass + self._ws = None + self.send_buffer.clear() + + def create_handshake_request( + self, metadata: Any = None, tracing: dict[str, str] | None = None + ) -> TransportMessage: + """Create a handshake request transport message. + + Handshake messages have seq=0, ack=0, controlFlags=0. 
"""Readable and Writable stream abstractions for River procedures."""

from __future__ import annotations

import asyncio
from typing import Callable, Generic, TypeVar

T = TypeVar("T")


class ReadableBrokenError(Exception):
    """Raised when a readable stream is broken."""

    pass


class Readable(Generic[T]):
    """Async readable stream for consuming procedure results.

    Supports async iteration via `async for` and explicit read via `next()`.
    """

    def __init__(self) -> None:
        self._queue: list[T] = []          # buffered, not-yet-consumed values
        self._closed = False               # producer finished
        self._broken = False               # consumer abandoned the stream
        self._locked = False               # some consumer has claimed the stream
        self._locked_by_consumer = False   # locked by collect() or __aiter__
        self._waiters: list[asyncio.Future[None]] = []

    def _push_value(self, value: T) -> None:
        """Push a value into the readable stream (internal use)."""
        if self._closed:
            raise RuntimeError("Cannot push to a closed readable")
        if self._broken:
            return  # Discard values after break to prevent unbounded buffering
        self._queue.append(value)
        self._notify_waiters()

    def _trigger_close(self) -> None:
        """Close the readable stream (internal use)."""
        if self._closed:
            raise RuntimeError("Readable already closed")
        self._closed = True
        self._notify_waiters()

    def _notify_waiters(self) -> None:
        # Pop-then-resolve so a resolved future is never left in the list.
        while self._waiters:
            w = self._waiters.pop(0)
            if not w.done():
                w.set_result(None)

    def is_readable(self) -> bool:
        """Whether the stream can still be iterated (not locked or broken)."""
        return not self._locked and not self._broken

    def is_closed(self) -> bool:
        """Whether the stream is fully consumed (closed and queue drained)."""
        return self._closed and len(self._queue) == 0

    def _has_values_in_queue(self) -> bool:
        """Whether there are buffered values waiting to be consumed."""
        return len(self._queue) > 0

    def break_(self) -> None:
        """Break the stream, discarding all queued values.

        If the stream is already closed and the queue is empty,
        this is a no-op (the stream is already done).
        """
        if self._locked and self._broken:
            return
        self._locked = True
        # If stream is already done (closed + empty), don't signal broken
        if self._closed and len(self._queue) == 0:
            self._notify_waiters()
            return
        self._broken = True
        self._queue.clear()
        self._notify_waiters()

    async def collect(self) -> list[T]:
        """Consume all values from the stream until it closes.

        Locks the stream. Raises TypeError if already locked.
        Returns the list of all values.
        """
        if self._locked:
            raise TypeError("Readable is already locked")
        self._locked = True
        self._locked_by_consumer = True
        results: list[T] = []
        async for item in self._iterate():
            results.append(item)
        return results

    async def next(self) -> tuple[bool, T | None]:
        """Read the next value from the stream.

        Returns (False, value) if a value is available.
        Returns (True, None) if the stream is done.
        """
        if self._locked_by_consumer:
            raise TypeError("Readable is already locked")
        async for item in self._iterate():
            return False, item
        return True, None

    async def _iterate(self):
        """Internal async generator yielding values until close/break."""
        self._locked = True
        while True:
            if self._broken:
                # Surface the break as an in-band error payload, mirroring
                # the wire-format error shape.
                yield {
                    "ok": False,
                    "payload": {
                        "code": "READABLE_BROKEN",
                        "message": "stream was broken",
                    },
                }
                return

            if self._queue:
                yield self._queue.pop(0)
                continue

            if self._closed:
                return

            # Wait for more data
            loop = asyncio.get_running_loop()
            fut: asyncio.Future[None] = loop.create_future()
            self._waiters.append(fut)
            try:
                await fut
            finally:
                # If this consumer is cancelled (or the generator is closed)
                # while waiting, remove the stale future so _waiters does not
                # accumulate dead entries. A normally-resolved future was
                # already popped by _notify_waiters, making this a no-op.
                if fut in self._waiters:
                    self._waiters.remove(fut)

    def __aiter__(self):
        if self._locked:
            raise TypeError("Readable is already locked")
        self._locked = True
        self._locked_by_consumer = True
        return _ReadableIterator(self)
+ """ + + def __init__(self, readable: Readable) -> None: + self._readable = readable + self._done = False + + def __aiter__(self): + return self + + async def __anext__(self): + if self._done: + raise StopAsyncIteration + + r = self._readable + while True: + if r._broken: + val = { + "ok": False, + "payload": { + "code": "READABLE_BROKEN", + "message": "stream was broken", + }, + } + # After yielding the broken error, the iterator is done + self._done = True + return val + + if r._queue: + return r._queue.pop(0) + + if r._closed: + raise StopAsyncIteration + + loop = asyncio.get_running_loop() + fut: asyncio.Future[None] = loop.create_future() + r._waiters.append(fut) + await fut + + def __del__(self): + # Synchronous cleanup when the iterator is GC'd (e.g. break in for-await) + self._readable._broken = True + self._readable._queue.clear() + # Wake any pending waiters so they don't block forever + for w in self._readable._waiters: + if not w.done(): + w.set_result(None) + self._readable._waiters.clear() + + +class Writable(Generic[T]): + """Writable stream for sending procedure requests. + + Wraps a write callback and a close callback. 
+ """ + + def __init__( + self, + write_cb: Callable[[T], None], + close_cb: Callable[[], None] | None = None, + ) -> None: + self._write_cb = write_cb + self._close_cb = close_cb + self._closed = False + + def write(self, value: T) -> None: + """Write a value to the stream.""" + if self._closed: + raise RuntimeError("Cannot write to a closed writable") + self._write_cb(value) + + def close(self, value: T | None = None) -> None: + """Close the stream, optionally writing a final value.""" + if self._closed: + return # Idempotent + if value is not None: + self._write_cb(value) + self._closed = True + # Nullify callbacks after invocation to prevent reuse + self._write_cb = lambda _: None + if self._close_cb: + self._close_cb() + self._close_cb = None + + def is_writable(self) -> bool: + return not self._closed + + def is_closed(self) -> bool: + return self._closed diff --git a/python-client/river/transport.py b/python-client/river/transport.py new file mode 100644 index 00000000..ad202fb8 --- /dev/null +++ b/python-client/river/transport.py @@ -0,0 +1,591 @@ +"""Client transport layer for the River protocol. + +Manages WebSocket connections, session lifecycle, handshake, +reconnection with backoff, and message dispatch. 
"""Client transport layer for the River protocol.

Manages WebSocket connections, session lifecycle, handshake,
reconnection with backoff, and message dispatch.
"""

from __future__ import annotations

import asyncio
import logging
import random
from typing import Any, Callable

from opentelemetry import propagate

from river.codec import BinaryCodec, Codec, CodecMessageAdapter
from river.session import DEFAULT_SESSION_OPTIONS, Session, SessionOptions, SessionState
from river.types import (
    RETRIABLE_HANDSHAKE_CODES,
    PartialTransportMessage,
    TransportMessage,
    generate_id,
    is_ack,
)

logger = logging.getLogger(__name__)


class EventDispatcher:
    """Simple event dispatcher with typed event names."""

    def __init__(self) -> None:
        # event name -> set of handlers (a set, so duplicate registrations
        # of the same callable coalesce)
        self._handlers: dict[str, set[Callable]] = {}

    def add_listener(self, event: str, handler: Callable) -> None:
        """Register a handler for an event."""
        self._handlers.setdefault(event, set()).add(handler)

    def remove_listener(self, event: str, handler: Callable) -> None:
        """Unregister a handler; unknown handlers are ignored."""
        if event in self._handlers:
            self._handlers[event].discard(handler)

    def dispatch(self, event: str, data: Any = None) -> None:
        """Invoke all handlers for an event; handler errors are logged,
        never propagated."""
        if event in self._handlers:
            # Copy to avoid mutation during iteration
            for handler in list(self._handlers[event]):
                try:
                    handler(data)
                except Exception:
                    # exception() (vs error()) records the traceback,
                    # which is the only way to locate a failing handler.
                    logger.exception("Event handler error for %s", event)

    def listener_count(self, event: str) -> int:
        """Number of handlers registered for an event."""
        return len(self._handlers.get(event, set()))


class LeakyBucketRateLimit:
    """Rate limiter with exponential backoff for connection retries."""

    def __init__(
        self,
        base_interval_ms: float = 150,
        max_jitter_ms: float = 200,
        max_backoff_ms: float = 32_000,
        attempt_budget_capacity: int = 5,
        budget_restore_interval_ms: float = 200,
    ) -> None:
        self.base_interval_ms = base_interval_ms
        self.max_jitter_ms = max_jitter_ms
        self.max_backoff_ms = max_backoff_ms
        self.attempt_budget_capacity = attempt_budget_capacity
        self.budget_restore_interval_ms = budget_restore_interval_ms
        self.budget_consumed: int = 0
        self._restore_task: asyncio.Task | None = None

    def has_budget(self) -> bool:
        """Whether another connection attempt is allowed."""
        return self.budget_consumed < self.attempt_budget_capacity

    def get_backoff_ms(self) -> float:
        """Backoff for the next attempt: 0 for the first attempt, then
        exponential in consumed budget, capped, plus uniform jitter."""
        if self.budget_consumed == 0:
            return 0
        exponent = max(0, self.budget_consumed - 1)
        jitter = random.random() * self.max_jitter_ms
        backoff = min(self.base_interval_ms * (2**exponent), self.max_backoff_ms)
        return backoff + jitter

    def consume_budget(self) -> None:
        """Spend one attempt; also stops any in-progress budget restore."""
        self._stop_restore()
        self.budget_consumed += 1

    def start_restoring_budget(self) -> None:
        """Start gradually restoring budget after a successful connection."""
        self._stop_restore()

        async def _restore_loop():
            try:
                while self.budget_consumed > 0:
                    await asyncio.sleep(self.budget_restore_interval_ms / 1000.0)
                    self.budget_consumed = max(0, self.budget_consumed - 1)
            except asyncio.CancelledError:
                pass

        try:
            loop = asyncio.get_running_loop()
            self._restore_task = loop.create_task(_restore_loop())
        except RuntimeError:
            # No running loop (e.g. synchronous teardown) — skip restore.
            pass

    def _stop_restore(self) -> None:
        if self._restore_task:
            self._restore_task.cancel()
            self._restore_task = None

    def reset(self) -> None:
        """Reset consumed budget and stop restoring."""
        self.budget_consumed = 0
        self._stop_restore()


class WebSocketClientTransport:
    """Client-side transport managing WebSocket connections and sessions.

    Handles connection lifecycle, handshakes, reconnection with backoff,
    heartbeat echo, and message dispatch.
    """
    def __init__(
        self,
        ws_url: str | Callable[..., str],
        client_id: str | None = None,
        server_id: str | None = None,
        codec: Codec | None = None,
        options: SessionOptions | None = None,
        handshake_metadata: Any = None,
    ) -> None:
        """Create a transport.

        ws_url may be a fixed URL or a callable that maps a server id to a
        URL (see _create_connection).
        """
        self.client_id = client_id or generate_id()
        self.server_id = server_id or "SERVER"
        self._ws_url = ws_url
        self._codec = codec or BinaryCodec()
        self._codec_adapter = CodecMessageAdapter(self._codec)
        self.options = options or DEFAULT_SESSION_OPTIONS
        self._handshake_metadata = handshake_metadata

        # State
        self._status: str = "open"  # 'open' | 'closed'
        self.sessions: dict[str, Session] = {}  # to_id -> Session
        self._events = EventDispatcher()
        self._retry_budget = LeakyBucketRateLimit()
        self._reconnect_on_connection_drop = self.options.enable_transparent_reconnects

        # Connection tasks
        self._connect_tasks: dict[str, asyncio.Task] = {}

        # Cached running loop; resolved lazily by _get_loop().
        self._loop: asyncio.AbstractEventLoop | None = None

    def get_status(self) -> str:
        """Current transport status: 'open' or 'closed'."""
        return self._status

    def _get_loop(self) -> asyncio.AbstractEventLoop:
        # Lazily capture the running loop on first use and reuse it after.
        if self._loop is None:
            self._loop = asyncio.get_running_loop()
        return self._loop

    # --- Event API ---

    def add_event_listener(self, event: str, handler: Callable) -> None:
        """Subscribe to a transport event (e.g. 'message', 'protocolError')."""
        self._events.add_listener(event, handler)

    def remove_event_listener(self, event: str, handler: Callable) -> None:
        """Unsubscribe a previously added handler."""
        self._events.remove_listener(event, handler)

    # --- Session Management ---

    def _get_or_create_session(self, to: str) -> Session:
        """Get an existing session or create a new unconnected one."""
        if to in self.sessions:
            return self.sessions[to]
        session = Session(
            session_id=generate_id(),
            from_id=self.client_id,
            to_id=to,
            codec=self._codec_adapter,
            options=self.options,
        )
        # When the session's disconnect grace period expires, tear it down.
        session._on_session_grace_elapsed = lambda: self._on_session_grace_elapsed(to)
        self.sessions[to] = session
        self._events.dispatch(
            "sessionStatus", {"status": "created", "session": session}
        )
        return session

    def _delete_session(self, to: str, emit_closing: bool = True) -> None:
        """Delete a session and clean up.

        Emits sessionStatus 'closing' (unless suppressed) then 'closed'.
        """
        session = self.sessions.pop(to, None)
        if session is None:
            return
        if emit_closing:
            self._events.dispatch(
                "sessionStatus", {"status": "closing", "session": session}
            )
        session.destroy()
        self._events.dispatch("sessionStatus", {"status": "closed", "session": session})

    def _on_session_grace_elapsed(self, to: str) -> None:
        """Called when a session's grace period expires."""
        logger.debug("Session grace period elapsed for %s", to)
        self._delete_session(to)

    # --- Connection Flow ---

    def connect(self, to: str | None = None) -> None:
        """Initiate a connection to the given server.

        Follows the state transition:
        NoConnection -> BackingOff -> Connecting -> Handshaking -> Connected
        """
        to = to or self.server_id
        if self._status != "open":
            return

        session = self._get_or_create_session(to)
        if session.state != SessionState.NO_CONNECTION:
            return  # Already connecting/connected

        # Retry budget gates every attempt; exhaustion is a protocol error.
        if not self._retry_budget.has_budget():
            self._events.dispatch(
                "protocolError",
                {"type": "conn_retry_exceeded", "message": "Retries exceeded"},
            )
            self._delete_session(to)
            return

        # Compute backoff BEFORE consuming so the first attempt gets 0.
        backoff_ms = self._retry_budget.get_backoff_ms()
        self._retry_budget.consume_budget()

        # Schedule the connection attempt after backoff
        loop = self._get_loop()
        session.state = SessionState.BACKING_OFF

        async def _do_connect():
            ws = None
            try:
                if backoff_ms > 0:
                    await asyncio.sleep(backoff_ms / 1000.0)

                # Re-check: transport may have closed / session destroyed
                # while we were backing off.
                if self._status != "open" or session._destroyed:
                    return

                session.state = SessionState.CONNECTING
                ws = await self._create_connection(to)

                if session._destroyed:
                    await ws.close()
                    return

                session.state = SessionState.HANDSHAKING
                await self._do_handshake(session, ws, to)
            except asyncio.CancelledError:
                # Clean up socket if we got cancelled mid-handshake
                if ws is not None and session._ws is not ws:
                    await ws.close()
            except Exception as e:
                logger.debug("Connection attempt failed for %s: %s", to, e)
                if not session._destroyed:
                    self._on_connection_failed(to)

        task = loop.create_task(_do_connect())
        self._connect_tasks[to] = task
ws.close() + except Exception as e: + logger.debug("Connection attempt failed for %s: %s", to, e) + if not session._destroyed: + self._on_connection_failed(to) + + task = loop.create_task(_do_connect()) + self._connect_tasks[to] = task + + async def _create_connection(self, to: str) -> Any: + """Create a new WebSocket connection.""" + import websockets + + url = self._ws_url if isinstance(self._ws_url, str) else self._ws_url(to) + + ws = await asyncio.wait_for( + websockets.connect( + url, max_size=None, ping_interval=None, ping_timeout=None + ), + timeout=self.options.connection_timeout_ms / 1000.0, + ) + return ws + + def _get_otel_propagation_context(self) -> dict[str, str] | None: + """Extract OTel propagation context.""" + ctx: dict[str, str] = {} + propagate.inject(ctx) + result = {} + if ctx.get("traceparent"): + result["traceparent"] = ctx["traceparent"] + if ctx.get("tracestate"): + result["tracestate"] = ctx["tracestate"] + return result or None + + async def _do_handshake(self, session: Session, ws: Any, to: str) -> None: + """Perform the handshake on a newly connected WebSocket.""" + # Send handshake request + tracing = self._get_otel_propagation_context() + hs_msg = session.create_handshake_request( + metadata=self._handshake_metadata, tracing=tracing + ) + ok, buf = self._codec_adapter.to_buffer(hs_msg) + if not ok: + # Handshake send failure is fatal — destroy session + logger.error("Failed to encode handshake: %s", buf) + await ws.close() + self._events.dispatch( + "protocolError", + {"type": "message_send_failure", "message": buf}, + ) + self._delete_session(to) + return + + await ws.send(buf) + + # Wait for handshake response + try: + response_bytes = await asyncio.wait_for( + ws.recv(), timeout=self.options.handshake_timeout_ms / 1000.0 + ) + except (asyncio.TimeoutError, Exception) as e: + logger.debug("Handshake timeout/error for %s: %s", to, e) + await ws.close() + self._on_connection_failed(to) + return + + if isinstance(response_bytes, str): 
+ response_bytes = response_bytes.encode("utf-8") + + ok, result = self._codec_adapter.from_buffer(response_bytes) + if not ok: + # Invalid handshake response is fatal + logger.error("Failed to decode handshake response: %s", result) + await ws.close() + self._delete_session(to) + return + + response_msg: TransportMessage = result # type: ignore[assignment] + payload = response_msg.payload + + # Validate handshake response + if not isinstance(payload, dict) or payload.get("type") != "HANDSHAKE_RESP": + # Invalid handshake schema is fatal + logger.error("Invalid handshake response payload") + await ws.close() + self._delete_session(to) + return + + status = payload.get("status", {}) + if not isinstance(status, dict): + logger.error( + "Invalid handshake status: expected dict, got %s", + type(status).__name__, + ) + await ws.close() + self._delete_session(to) + return + + if not status.get("ok"): + code = status.get("code", "UNKNOWN") + reason = status.get("reason", "Unknown reason") + logger.debug("Handshake rejected for %s: %s (%s)", to, reason, code) + await ws.close() + + if code in RETRIABLE_HANDSHAKE_CODES: + # Session state mismatch - destroy session and retry + self._delete_session(to) + self._try_reconnecting(to) + else: + # Fatal handshake error — do not retry. + # Delete the session so pending procedures get + # UNEXPECTED_DISCONNECT via the sessionStatus "closing" event. 
+ self._events.dispatch( + "protocolError", + { + "type": "handshake_failed", + "message": reason, + "code": code, + }, + ) + self._delete_session(to) + return + + # Check session ID match + resp_session_id = status.get("sessionId") + if resp_session_id != session.id: + # Server assigned a different session - old session is stale + logger.debug( + "Session ID mismatch: expected %s, got %s", + session.id, + resp_session_id, + ) + await ws.close() + # The server lost our session state; destroy old and create new + self._delete_session(to, emit_closing=True) + self._try_reconnecting(to) + return + + # Handshake successful + loop = self._get_loop() + session.set_connected(ws, loop) + self._events.dispatch( + "sessionTransition", + {"state": SessionState.CONNECTED, "id": session.id}, + ) + + # Retransmit buffered messages + ok, err = session.send_buffered_messages() + if not ok: + logger.error("Failed to send buffered messages: %s", err) + self._events.dispatch( + "protocolError", + {"type": "message_send_failure", "message": err}, + ) + self._delete_session(to) + return + + # Start restoring retry budget + self._retry_budget.start_restoring_budget() + + # Start listening for messages + self._start_message_listener(session, ws, to) + + def _start_message_listener(self, session: Session, ws: Any, to: str) -> None: + """Start the async message listener on the WebSocket.""" + loop = self._get_loop() + + session._on_connection_closed = lambda: self._on_connection_dropped(to) + + async def _listen(): + try: + async for raw_msg in ws: + if session._destroyed: + break + if isinstance(raw_msg, str): + raw_msg = raw_msg.encode("utf-8") + self._on_message_data(session, raw_msg, to) + except Exception as e: + if not session._destroyed: + logger.debug("WebSocket error for session %s: %s", session.id, e) + finally: + if not session._destroyed: + self._on_connection_dropped(to) + + loop.create_task(_listen()) + + def _on_message_data(self, session: Session, raw: bytes, to: str) -> 
    def _on_message_data(self, session: Session, raw: bytes, to: str) -> None:
        """Handle raw bytes received from the WebSocket.

        Decodes, enforces seq ordering, updates session bookkeeping, and
        dispatches non-heartbeat messages to listeners.
        """
        ok, result = self._codec_adapter.from_buffer(raw)
        if not ok:
            # Invalid message is fatal — destroy the session
            self._events.dispatch(
                "protocolError",
                {"type": "invalid_message", "message": result},
            )
            self._delete_session(to)
            return

        msg: TransportMessage = result  # type: ignore[assignment]

        # Check message ordering
        if msg.seq != session.ack:
            if msg.seq < session.ack:
                # Duplicate - discard silently
                return
            else:
                # Future message - close connection to force re-handshake
                logger.debug(
                    "Seq out of order: expected %d, got %d. Closing.",
                    session.ack,
                    msg.seq,
                )
                if session._ws:
                    asyncio.ensure_future(session._ws.close())
                return

        # Update bookkeeping
        session.update_bookkeeping(msg.ack, msg.seq)

        # Dispatch non-heartbeat messages
        if not is_ack(msg.control_flags):
            self._events.dispatch("message", msg)
            return

        # If this is a heartbeat and we're not actively heartbeating (client),
        # echo back
        if not session._is_actively_heartbeating:
            session.send_heartbeat()

    def _on_connection_dropped(self, to: str) -> None:
        """Handle a dropped connection.

        Moves a CONNECTED session to NO_CONNECTION (starting its grace
        period) and optionally schedules a reconnect.
        """
        session = self.sessions.get(to)
        if session is None or session._destroyed:
            return
        if session.state != SessionState.CONNECTED:
            # Drop during connect/handshake is handled by the connect path.
            return

        loop = self._get_loop()
        session.set_disconnected(loop)
        self._events.dispatch(
            "sessionTransition",
            {"state": SessionState.NO_CONNECTION, "id": session.id},
        )

        if self._reconnect_on_connection_drop:
            self._try_reconnecting(to)

    def _on_connection_failed(self, to: str) -> None:
        """Handle a failed connection attempt."""
        session = self.sessions.get(to)
        if session is None or session._destroyed:
            return

        # Transition to NoConnection with grace period so the session
        # is eventually destroyed if reconnect doesn't succeed.
        # Only start the grace period if one isn't already running,
        # so repeated failures don't keep extending the deadline.
        loop = self._get_loop()
        session.state = SessionState.NO_CONNECTION
        if session._grace_period_task is None or session._grace_period_task.done():
            session.start_grace_period(loop)

        if self._reconnect_on_connection_drop:
            self._try_reconnecting(to)

    def _try_reconnecting(self, to: str) -> None:
        """Try to reconnect to the server."""
        if self._status != "open":
            return
        if not self._reconnect_on_connection_drop:
            return
        # Use call_soon to break out of the current call stack
        loop = self._get_loop()
        loop.call_soon(lambda: self.connect(to))

    # --- Session-Bound Send ---

    def get_session_bound_send_fn(
        self, to: str, session_id: str
    ) -> Callable[[PartialTransportMessage], str]:
        """Get a send function scoped to a specific session.

        The send function will raise if the session has been replaced or
        destroyed, so stale stream handles cannot write into a new session.
        """

        def _send(msg: PartialTransportMessage) -> str:
            session = self.sessions.get(to)
            if session is None:
                raise RuntimeError("Session scope ended (closed)")
            if session.id != session_id or session._destroyed:
                raise RuntimeError("Session scope ended (transition)")

            ok, result = session.send(msg)
            if not ok:
                # Send failure is fatal — destroy session
                self._events.dispatch(
                    "protocolError",
                    {"type": "message_send_failure", "message": result},
                )
                self._delete_session(to)
                raise RuntimeError(f"Send failed: {result}")
            return result

        return _send

    # --- Lifecycle ---

    async def close(self) -> None:
        """Close the transport and all sessions. Idempotent."""
        if self._status == "closed":
            return
        self._status = "closed"

        # Cancel all pending connection tasks
        for task in self._connect_tasks.values():
            task.cancel()
        self._connect_tasks.clear()

        # Delete all sessions
        for to in list(self.sessions.keys()):
            self._delete_session(to)

        self._retry_budget.reset()
        self._events.dispatch("transportStatus", {"status": "closed"})

    @property
    def reconnect_on_connection_drop(self) -> bool:
        # Whether dropped connections trigger automatic reconnects.
        return self._reconnect_on_connection_drop

    @reconnect_on_connection_drop.setter
    def reconnect_on_connection_drop(self, value: bool) -> None:
        self._reconnect_on_connection_drop = value
"""Core types for the River protocol."""

from __future__ import annotations

import random
import secrets
import string
from dataclasses import dataclass, field
from enum import IntFlag
from typing import Any, Generic, TypeVar, Union

# --- ID Generation ---

_ID_ALPHABET = string.ascii_letters + string.digits
_ID_LENGTH = 12


def generate_id() -> str:
    """Generate a nanoid-style random ID (12 chars, alphanumeric).

    Uses ``secrets.choice`` rather than ``random``: nanoid produces IDs
    with a cryptographically secure RNG, and session/stream IDs should not
    be predictable across clients.
    """
    return "".join(secrets.choice(_ID_ALPHABET) for _ in range(_ID_LENGTH))


# --- Control Flags ---


class ControlFlags(IntFlag):
    """Bit flags for transport message control signals."""

    AckBit = 0b00001  # 1 - heartbeat/ack only
    StreamOpenBit = 0b00010  # 2 - first message of a stream
    StreamCancelBit = 0b00100  # 4 - abrupt cancel with ProtocolError payload
    StreamClosedBit = 0b01000  # 8 - last message of a stream


def is_ack(flags: int) -> bool:
    """True if the ack (heartbeat) bit is set."""
    return (flags & ControlFlags.AckBit) == ControlFlags.AckBit


def is_stream_open(flags: int) -> bool:
    """True if the stream-open bit is set."""
    return (flags & ControlFlags.StreamOpenBit) == ControlFlags.StreamOpenBit


def is_stream_cancel(flags: int) -> bool:
    """True if the stream-cancel bit is set."""
    return (flags & ControlFlags.StreamCancelBit) == ControlFlags.StreamCancelBit


def is_stream_close(flags: int) -> bool:
    """True if the stream-closed bit is set."""
    return (flags & ControlFlags.StreamClosedBit) == ControlFlags.StreamClosedBit


# --- Transport Message ---
@dataclass
class TransportMessage:
    """The envelope for all messages sent over the wire."""

    id: str
    from_: str  # 'from' is a Python keyword
    to: str
    seq: int
    ack: int
    payload: Any
    stream_id: str
    control_flags: int = 0
    service_name: str | None = None
    procedure_name: str | None = None
    tracing: dict[str, str] | None = None

    def to_dict(self) -> dict[str, Any]:
        """Serialize to a dict matching the wire format (camelCase keys,
        'from' restored). Optional fields are omitted when None."""
        d: dict[str, Any] = {
            "id": self.id,
            "from": self.from_,
            "to": self.to,
            "seq": self.seq,
            "ack": self.ack,
            "payload": self.payload,
            "streamId": self.stream_id,
            "controlFlags": self.control_flags,
        }
        if self.service_name is not None:
            d["serviceName"] = self.service_name
        if self.procedure_name is not None:
            d["procedureName"] = self.procedure_name
        if self.tracing is not None:
            d["tracing"] = self.tracing
        return d

    @classmethod
    def from_dict(cls, d: dict[str, Any]) -> TransportMessage:
        """Deserialize from a wire format dict.

        Raises ``KeyError`` if a required field is missing and ``TypeError``
        if any field (required or optional-but-present) has the wrong type.
        """
        required_str = {
            "id": "id",
            "from": "from",
            "to": "to",
            "streamId": "streamId",
        }
        for wire_key, label in required_str.items():
            if wire_key not in d:
                raise KeyError(f"Missing required field: {label}")
            if not isinstance(d[wire_key], str):
                raise TypeError(
                    f"Field '{label}' must be str, got {type(d[wire_key]).__name__}"
                )

        required_int = {"seq": "seq", "ack": "ack"}
        for wire_key, label in required_int.items():
            if wire_key not in d:
                raise KeyError(f"Missing required field: {label}")
            # bool is a subclass of int in Python — reject it explicitly
            if isinstance(d[wire_key], bool) or not isinstance(d[wire_key], int):
                raise TypeError(
                    f"Field '{label}' must be int, got {type(d[wire_key]).__name__}"
                )

        if "payload" not in d:
            raise KeyError("Missing required field: payload")

        if "controlFlags" not in d:
            raise KeyError("Missing required field: controlFlags")
        control_flags = d["controlFlags"]
        if isinstance(control_flags, bool) or not isinstance(control_flags, int):
            raise TypeError(
                f"Field 'controlFlags' must be int, got {type(control_flags).__name__}"
            )

        # Optional fields were previously accepted with any type; validate
        # them with the same strictness as the required fields.
        for opt_key in ("serviceName", "procedureName"):
            if opt_key in d and d[opt_key] is not None:
                if not isinstance(d[opt_key], str):
                    raise TypeError(
                        f"Field '{opt_key}' must be str, "
                        f"got {type(d[opt_key]).__name__}"
                    )
        if "tracing" in d and d["tracing"] is not None:
            if not isinstance(d["tracing"], dict):
                raise TypeError(
                    f"Field 'tracing' must be dict, got {type(d['tracing']).__name__}"
                )

        return cls(
            id=d["id"],
            from_=d["from"],
            to=d["to"],
            seq=d["seq"],
            ack=d["ack"],
            payload=d["payload"],
            stream_id=d["streamId"],
            control_flags=control_flags,
            service_name=d.get("serviceName"),
            procedure_name=d.get("procedureName"),
            tracing=d.get("tracing"),
        )


@dataclass
class PartialTransportMessage:
    """A transport message missing id, from, to, seq, ack -- filled in by Session."""

    payload: Any
    stream_id: str
    control_flags: int = 0
    service_name: str | None = None
    procedure_name: str | None = None
    tracing: dict[str, str] | None = None


# --- Result Types ---

T = TypeVar("T")
E = TypeVar("E")


@dataclass
class OkResult(Generic[T]):
    """Success result (ok is always True and not settable)."""

    payload: T
    ok: bool = field(default=True, init=False)


@dataclass
class ErrResult(Generic[E]):
    """Error result (ok is always False and not settable)."""

    payload: E
    ok: bool = field(default=False, init=False)


Result = Union[OkResult[T], ErrResult[E]]


def Ok(payload: Any) -> OkResult:
    """Create an Ok result."""
    return OkResult(payload=payload)


def Err(payload: Any) -> ErrResult:
    """Create an Err result."""
    return ErrResult(payload=payload)


def ok_result(payload: Any) -> dict[str, Any]:
    """Create an ok result dict for wire format."""
    return {"ok": True, "payload": payload}


def err_result(code: str, message: str, extras: Any = None) -> dict[str, Any]:
    """Create an error result dict for wire format."""
    p: dict[str, Any] = {"code": code, "message": message}
    if extras is not None:
        p["extras"] = extras
    return {"ok": False, "payload": p}


# --- Protocol Error Codes ---

UNEXPECTED_DISCONNECT_CODE = "UNEXPECTED_DISCONNECT"
CANCEL_CODE = "CANCEL"
UNCAUGHT_ERROR_CODE = "UNCAUGHT_ERROR"
INVALID_REQUEST_CODE = "INVALID_REQUEST"

# --- Protocol Version ---

PROTOCOL_VERSION = "v2.0"


# --- Control Message Helpers ---


def handshake_request_payload(
    session_id: str,
    next_expected_seq: int,
    next_sent_seq: int,
    metadata: Any = None,
) -> dict[str, Any]:
    """Create a handshake request payload.

    `metadata` is included only when provided (not None).
    """
    payload: dict[str, Any] = {
        "type": "HANDSHAKE_REQ",
        "protocolVersion": PROTOCOL_VERSION,
        "sessionId": session_id,
        "expectedSessionState": {
            "nextExpectedSeq": next_expected_seq,
            "nextSentSeq": next_sent_seq,
        },
    }
    if metadata is not None:
        payload["metadata"] = metadata
    return payload


def handshake_response_ok(session_id: str) -> dict[str, Any]:
    """Create a successful handshake response payload."""
    return {
        "type": "HANDSHAKE_RESP",
        "status": {"ok": True, "sessionId": session_id},
    }


def ack_payload() -> dict[str, str]:
    """Heartbeat/ACK control payload."""
    return {"type": "ACK"}


def close_payload() -> dict[str, str]:
    """Stream close control payload."""
    return {"type": "CLOSE"}


def close_stream_message(stream_id: str) -> PartialTransportMessage:
    """Create a close stream partial message."""
    return PartialTransportMessage(
        payload=close_payload(),
        stream_id=stream_id,
        control_flags=ControlFlags.StreamClosedBit,
    )


def cancel_message(stream_id: str, error_payload: dict) -> PartialTransportMessage:
    """Create a cancel stream partial message."""
    return PartialTransportMessage(
        payload=error_payload,
        stream_id=stream_id,
        control_flags=ControlFlags.StreamCancelBit,
    )


def heartbeat_message() -> PartialTransportMessage:
    """Create a heartbeat partial message."""
    return PartialTransportMessage(
        payload=ack_payload(),
        stream_id="heartbeat",
        control_flags=ControlFlags.AckBit,
    )


# --- Handshake Error Codes ---

RETRIABLE_HANDSHAKE_CODES = frozenset({"SESSION_STATE_MISMATCH"})
FATAL_HANDSHAKE_CODES = frozenset(
    {
        "MALFORMED_HANDSHAKE_META",
        "MALFORMED_HANDSHAKE",
        "PROTOCOL_VERSION_MISMATCH",
        "REJECTED_BY_CUSTOM_HANDLER",
        "REJECTED_UNSUPPORTED_CLIENT",
    }
)
00000000..e69de29b diff --git a/python-client/tests/conftest.py b/python-client/tests/conftest.py new file mode 100644 index 00000000..74c622dc --- /dev/null +++ b/python-client/tests/conftest.py @@ -0,0 +1,219 @@ +"""Pytest fixtures for River Python client tests. + +Manages the lifecycle of TypeScript test server processes that the +Python client connects to. +""" + +from __future__ import annotations + +import os +import re +import selectors +import signal +import subprocess +import sys +import time +from typing import Generator + +import pytest + +from river.codec import BinaryCodec, Codec + +TESTS_DIR = os.path.dirname(__file__) +SERVER_TS = os.path.join(TESTS_DIR, "test_server.ts") +SERVER_MJS = os.path.join(TESTS_DIR, "test_server.mjs") +HANDSHAKE_SERVER_TS = os.path.join(TESTS_DIR, "test_server_handshake.ts") +HANDSHAKE_SERVER_MJS = os.path.join(TESTS_DIR, "test_server_handshake.mjs") +EXTRACT_SCHEMA_TS = os.path.join(TESTS_DIR, "extract_test_schema.ts") +EXTRACT_SCHEMA_MJS = os.path.join(TESTS_DIR, "extract_test_schema.mjs") +SCHEMA_JSON = os.path.join(TESTS_DIR, "test_schema.json") +GENERATED_DIR = os.path.join(TESTS_DIR, "generated") +RIVER_ROOT = os.path.abspath(os.path.join(TESTS_DIR, "..", "..")) +ESBUILD = os.path.join(RIVER_ROOT, "node_modules", ".bin", "esbuild") + + +def _esbuild_bundle(ts_path: str, mjs_path: str) -> None: + """Bundle a .ts file to .mjs using esbuild.""" + result = subprocess.run( + [ + ESBUILD, + ts_path, + "--bundle", + "--platform=node", + "--format=esm", + f"--outfile={mjs_path}", + # keep heavy deps external so the bundle stays small and + # we reuse whatever is already in node_modules + "--external:ws", + "--external:@sinclair/typebox", + "--external:@msgpack/msgpack", + ], + cwd=RIVER_ROOT, + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise RuntimeError(f"esbuild failed ({result.returncode}):\n{result.stderr}") + + +def _build_test_server() -> None: + """Bundle test_server.ts -> test_server.mjs 
using esbuild.""" + _esbuild_bundle(SERVER_TS, SERVER_MJS) + + +def _build_handshake_server() -> None: + """Bundle test_server_handshake.ts -> test_server_handshake.mjs using esbuild.""" + _esbuild_bundle(HANDSHAKE_SERVER_TS, HANDSHAKE_SERVER_MJS) + + +def _extract_test_schema() -> None: + """Bundle and run extract_test_schema.ts to produce test_schema.json, + then run codegen to produce the generated client module.""" + _esbuild_bundle(EXTRACT_SCHEMA_TS, EXTRACT_SCHEMA_MJS) + result = subprocess.run( + ["node", EXTRACT_SCHEMA_MJS], + cwd=RIVER_ROOT, + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise RuntimeError( + f"extract_test_schema failed ({result.returncode}):\n{result.stderr}" + ) + + # Run codegen + result = subprocess.run( + [ + sys.executable, + "-m", + "river.codegen", + "--schema", + SCHEMA_JSON, + "--output", + GENERATED_DIR, + ], + cwd=os.path.join(RIVER_ROOT, "python-client"), + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise RuntimeError( + f"codegen failed ({result.returncode}):\n{result.stderr}\n{result.stdout}" + ) + + +def _start_server( + mjs_path: str, + label: str, + env: dict[str, str] | None = None, +) -> tuple[subprocess.Popen, int]: + """Start a Node.js server process and return (proc, port).""" + full_env = {**os.environ, **(env or {})} + proc = subprocess.Popen( + ["node", mjs_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=RIVER_ROOT, + env=full_env, + ) + + port = None + deadline = time.monotonic() + 30 + assert proc.stdout is not None + sel = selectors.DefaultSelector() + sel.register(proc.stdout, selectors.EVENT_READ) + buf = b"" + try: + while time.monotonic() < deadline: + remaining = deadline - time.monotonic() + if remaining <= 0: + break + ready = sel.select(timeout=min(remaining, 1.0)) + if not ready: + if proc.poll() is not None: + stderr = proc.stderr.read().decode("utf-8") if proc.stderr else "" + raise RuntimeError( + f"{label} exited with code 
{proc.returncode}.\nstderr: {stderr}" + ) + continue + chunk = proc.stdout.read1(4096) # type: ignore[union-attr] + if not chunk: + # EOF — child closed stdout (likely exited) + if proc.poll() is not None: + stderr = proc.stderr.read().decode("utf-8") if proc.stderr else "" + raise RuntimeError( + f"{label} exited with code {proc.returncode}.\nstderr: {stderr}" + ) + continue + buf += chunk + while b"\n" in buf: + line_bytes, buf = buf.split(b"\n", 1) + line = line_bytes.decode("utf-8").strip() + m = re.match(r"RIVER_PORT=(\d+)", line) + if m: + port = int(m.group(1)) + break + if port is not None: + break + finally: + sel.unregister(proc.stdout) + sel.close() + + if port is None: + proc.kill() + raise RuntimeError(f"Failed to get port from {label} within 30s") + + return proc, port + + +def _stop_server(proc: subprocess.Popen) -> None: + proc.send_signal(signal.SIGTERM) + try: + proc.wait(timeout=5) + except subprocess.TimeoutExpired: + proc.kill() + + +@pytest.fixture(scope="session") +def generated_client_dir() -> str: + """Extract test schema and run codegen. 
Returns the generated dir path.""" + _extract_test_schema() + return GENERATED_DIR + + +@pytest.fixture(scope="session") +def river_server_port() -> Generator[int, None, None]: + """Build and start the TypeScript test server, yield its port.""" + _build_test_server() + proc, port = _start_server(SERVER_MJS, "Test server", env={"RIVER_CODEC": "binary"}) + yield port + _stop_server(proc) + + +@pytest.fixture +def server_url(river_server_port: int) -> str: + """Return the WebSocket URL for the test server.""" + return f"ws://127.0.0.1:{river_server_port}" + + +@pytest.fixture(scope="session") +def river_handshake_server_port() -> Generator[int, None, None]: + """Build and start the handshake test server, yield its port.""" + _build_handshake_server() + proc, port = _start_server(HANDSHAKE_SERVER_MJS, "Handshake test server") + yield port + _stop_server(proc) + + +@pytest.fixture +def handshake_server_url(river_handshake_server_port: int) -> str: + """Return the WebSocket URL for the handshake test server.""" + return f"ws://127.0.0.1:{river_handshake_server_port}" + + +@pytest.fixture +def codec_and_url( + river_server_port: int, +) -> tuple[Codec, str]: + """Return (BinaryCodec(), server_url).""" + return BinaryCodec(), f"ws://127.0.0.1:{river_server_port}" diff --git a/python-client/tests/extract_test_schema.ts b/python-client/tests/extract_test_schema.ts new file mode 100644 index 00000000..ae6f00cd --- /dev/null +++ b/python-client/tests/extract_test_schema.ts @@ -0,0 +1,286 @@ +/** + * Extract the test server schema to a JSON file for codegen tests. + * + * Defines the same service schemas as test_server.ts but with stub + * handlers — only the type shapes matter for serialization. 
+ * + * Usage (from river repo root, after esbuild bundle): + * node python-client/tests/extract_test_schema.mjs + */ +import fs from 'node:fs'; +import path from 'node:path'; +import { + createServiceSchema, + Procedure, + Ok, + serializeSchema, +} from '../../router'; +import { Type } from '@sinclair/typebox'; + +const ServiceSchema = createServiceSchema(); + +const RecursivePayload = Type.Recursive((This) => + Type.Object({ + value: Type.String(), + children: Type.Optional(Type.Array(This)), + }), +); + +const TestServiceSchema = ServiceSchema.define({ + add: Procedure.rpc({ + requestInit: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok({ result: reqInit.n }); + }, + }), + echo: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({ + msg: Type.String(), + ignore: Type.Optional(Type.Boolean()), + }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), + echoWithPrefix: Procedure.stream({ + requestInit: Type.Object({ prefix: Type.String() }), + requestData: Type.Object({ + msg: Type.String(), + ignore: Type.Optional(Type.Boolean()), + }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), + echoBinary: Procedure.rpc({ + requestInit: Type.Object({ data: Type.Uint8Array() }), + responseData: Type.Object({ + data: Type.Uint8Array(), + length: Type.Number(), + }), + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok({ data: reqInit.data, length: reqInit.data.length }); + }, + }), + echoRecursive: Procedure.rpc({ + requestInit: RecursivePayload, + responseData: RecursivePayload, + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok(reqInit); + }, + }), +}); + +const 
OrderingServiceSchema = ServiceSchema.define({ + add: Procedure.rpc({ + requestInit: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ n: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok({ n: reqInit.n }); + }, + }), + getAll: Procedure.rpc({ + requestInit: Type.Object({}), + responseData: Type.Object({ msgs: Type.Array(Type.Number()) }), + responseError: Type.Never(), + async handler(_ctx) { + return Ok({ msgs: [] as Array }); + }, + }), +}); + +const FallibleServiceSchema = ServiceSchema.define({ + divide: Procedure.rpc({ + requestInit: Type.Object({ a: Type.Number(), b: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Union([ + Type.Object({ + code: Type.Literal('DIV_BY_ZERO'), + message: Type.String(), + }), + Type.Object({ + code: Type.Literal('INFINITY'), + message: Type.String(), + }), + ]), + async handler({ reqInit }) { + return Ok({ result: reqInit.a / reqInit.b }); + }, + }), + echo: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({ + msg: Type.String(), + throwResult: Type.Optional(Type.Boolean()), + throwError: Type.Optional(Type.Boolean()), + }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Object({ + code: Type.Literal('STREAM_ERROR'), + message: Type.String(), + }), + async handler({ resWritable }) { + resWritable.close(); + }, + }), +}); + +const SubscribableServiceSchema = ServiceSchema.define({ + add: Procedure.rpc({ + requestInit: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok({ result: reqInit.n }); + }, + }), + value: Procedure.subscription({ + requestInit: Type.Object({}), + responseData: Type.Object({ count: Type.Number() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.write(Ok({ count: 0 })); + resWritable.close(); + }, + 
}), +}); + +const UploadableServiceSchema = ServiceSchema.define({ + addMultiple: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Never(), + async handler(_ctx) { + return Ok({ result: 0 }); + }, + }), + addMultipleWithPrefix: Procedure.upload({ + requestInit: Type.Object({ prefix: Type.String() }), + requestData: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.String() }), + responseError: Type.Never(), + async handler(_ctx) { + return Ok({ result: '' }); + }, + }), + cancellableAdd: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Object({ + code: Type.Literal('CANCEL'), + message: Type.String(), + }), + async handler(_ctx) { + return Ok({ result: 0 }); + }, + }), +}); + +const CancellationServiceSchema = ServiceSchema.define({ + blockingRpc: Procedure.rpc({ + requestInit: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler(_ctx) { + return Ok({}); + }, + }), + blockingStream: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), + blockingUpload: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler(_ctx) { + return Ok({}); + }, + }), + blockingSubscription: Procedure.subscription({ + requestInit: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), + immediateRpc: Procedure.rpc({ + requestInit: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + 
responseError: Type.Never(), + async handler(_ctx) { + return Ok({ done: true }); + }, + }), + immediateStream: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), + immediateUpload: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + responseError: Type.Never(), + async handler(_ctx) { + return Ok({ done: true }); + }, + }), + immediateSubscription: Procedure.subscription({ + requestInit: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), + countedStream: Procedure.stream({ + requestInit: Type.Object({ total: Type.Number() }), + requestData: Type.Object({}), + responseData: Type.Object({ i: Type.Number() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.close(); + }, + }), +}); + +const services = { + test: TestServiceSchema, + ordering: OrderingServiceSchema, + fallible: FallibleServiceSchema, + subscribable: SubscribableServiceSchema, + uploadable: UploadableServiceSchema, + cancel: CancellationServiceSchema, +}; + +const schema = serializeSchema(services); +const outPath = path.join( + path.dirname(new URL(import.meta.url).pathname), + 'test_schema.json', +); +fs.writeFileSync(outPath, JSON.stringify(schema, null, 2)); +console.log(`Wrote schema to ${outPath}`); diff --git a/python-client/tests/generated/__init__.py b/python-client/tests/generated/__init__.py new file mode 100644 index 00000000..0668cffd --- /dev/null +++ b/python-client/tests/generated/__init__.py @@ -0,0 +1,17 @@ +"""Generated River service clients.""" + +from .cancel_client import CancelClient +from .fallible_client import FallibleClient +from .ordering_client import 
OrderingClient +from .subscribable_client import SubscribableClient +from .test_client import TestClient +from .uploadable_client import UploadableClient + +__all__ = [ + "CancelClient", + "FallibleClient", + "OrderingClient", + "SubscribableClient", + "TestClient", + "UploadableClient", +] diff --git a/python-client/tests/generated/_errors.py b/python-client/tests/generated/_errors.py new file mode 100644 index 00000000..ba3e8c37 --- /dev/null +++ b/python-client/tests/generated/_errors.py @@ -0,0 +1,45 @@ +"""Protocol-level error types for the River protocol. + +These errors can be returned by any procedure regardless of its +service-specific error schema. +""" + +from __future__ import annotations + +from typing import Literal + +from typing_extensions import NotRequired, TypedDict + + +class UncaughtError(TypedDict): + code: Literal["UNCAUGHT_ERROR"] + message: str + + +class UnexpectedDisconnect(TypedDict): + code: Literal["UNEXPECTED_DISCONNECT"] + message: str + + +class InvalidRequestExtrasItem(TypedDict): + path: str + message: str + + +class InvalidRequestExtras(TypedDict): + firstValidationErrors: list[InvalidRequestExtrasItem] + totalErrors: float + + +class InvalidRequest(TypedDict): + code: Literal["INVALID_REQUEST"] + message: str + extras: NotRequired[InvalidRequestExtras] + + +class Cancel(TypedDict): + code: Literal["CANCEL"] + message: str + + +ProtocolError = UncaughtError | UnexpectedDisconnect | InvalidRequest | Cancel diff --git a/python-client/tests/generated/_types.py b/python-client/tests/generated/_types.py new file mode 100644 index 00000000..fc8c8906 --- /dev/null +++ b/python-client/tests/generated/_types.py @@ -0,0 +1,258 @@ +"""Generated type definitions for River services.""" + +from __future__ import annotations + +from typing import Literal + +from typing_extensions import NotRequired, TypedDict + + +class TestAddInit(TypedDict): + n: float + + +class TestAddOutput(TypedDict): + result: float + + +class TestEchoInit(TypedDict): + 
pass + + +class TestEchoInput(TypedDict): + msg: str + ignore: NotRequired[bool] + + +class TestEchoOutput(TypedDict): + response: str + + +class TestEchoWithPrefixInit(TypedDict): + prefix: str + + +class TestEchoWithPrefixInput(TypedDict): + msg: str + ignore: NotRequired[bool] + + +class TestEchoWithPrefixOutput(TypedDict): + response: str + + +class TestEchoBinaryInit(TypedDict): + data: bytes + + +class TestEchoBinaryOutput(TypedDict): + data: bytes + length: float + + +class TestEchoRecursiveInit(TypedDict): + value: str + children: NotRequired[list[TestEchoRecursiveInit]] + + +class TestEchoRecursiveOutput(TypedDict): + value: str + children: NotRequired[list[TestEchoRecursiveOutput]] + + +class OrderingAddInit(TypedDict): + n: float + + +class OrderingAddOutput(TypedDict): + n: float + + +class OrderingGetAllInit(TypedDict): + pass + + +class OrderingGetAllOutput(TypedDict): + msgs: list[float] + + +class FallibleDivideInit(TypedDict): + a: float + b: float + + +class FallibleDivideOutput(TypedDict): + result: float + + +class FallibleDivideErrorDivByZero(TypedDict): + code: Literal["DIV_BY_ZERO"] + message: str + + +class FallibleDivideErrorInfinity(TypedDict): + code: Literal["INFINITY"] + message: str + + +class FallibleEchoInit(TypedDict): + pass + + +class FallibleEchoInput(TypedDict): + msg: str + throwResult: NotRequired[bool] + throwError: NotRequired[bool] + + +class FallibleEchoOutput(TypedDict): + response: str + + +class FallibleEchoError(TypedDict): + code: Literal["STREAM_ERROR"] + message: str + + +class SubscribableAddInit(TypedDict): + n: float + + +class SubscribableAddOutput(TypedDict): + result: float + + +class SubscribableValueInit(TypedDict): + pass + + +class SubscribableValueOutput(TypedDict): + count: float + + +class UploadableAddMultipleInit(TypedDict): + pass + + +class UploadableAddMultipleInput(TypedDict): + n: float + + +class UploadableAddMultipleOutput(TypedDict): + result: float + + +class 
UploadableAddMultipleWithPrefixInit(TypedDict): + prefix: str + + +class UploadableAddMultipleWithPrefixInput(TypedDict): + n: float + + +class UploadableAddMultipleWithPrefixOutput(TypedDict): + result: str + + +class UploadableCancellableAddInit(TypedDict): + pass + + +class UploadableCancellableAddInput(TypedDict): + n: float + + +class UploadableCancellableAddOutput(TypedDict): + result: float + + +class CancelBlockingRpcInit(TypedDict): + pass + + +class CancelBlockingRpcOutput(TypedDict): + pass + + +class CancelBlockingStreamInit(TypedDict): + pass + + +class CancelBlockingStreamInput(TypedDict): + pass + + +class CancelBlockingStreamOutput(TypedDict): + pass + + +class CancelBlockingUploadInit(TypedDict): + pass + + +class CancelBlockingUploadInput(TypedDict): + pass + + +class CancelBlockingUploadOutput(TypedDict): + pass + + +class CancelBlockingSubscriptionInit(TypedDict): + pass + + +class CancelBlockingSubscriptionOutput(TypedDict): + pass + + +class CancelImmediateRpcInit(TypedDict): + pass + + +class CancelImmediateRpcOutput(TypedDict): + done: bool + + +class CancelImmediateStreamInit(TypedDict): + pass + + +class CancelImmediateStreamInput(TypedDict): + pass + + +class CancelImmediateStreamOutput(TypedDict): + done: bool + + +class CancelImmediateUploadInit(TypedDict): + pass + + +class CancelImmediateUploadInput(TypedDict): + pass + + +class CancelImmediateUploadOutput(TypedDict): + done: bool + + +class CancelImmediateSubscriptionInit(TypedDict): + pass + + +class CancelImmediateSubscriptionOutput(TypedDict): + done: bool + + +class CancelCountedStreamInit(TypedDict): + total: float + + +class CancelCountedStreamInput(TypedDict): + pass + + +class CancelCountedStreamOutput(TypedDict): + i: float diff --git a/python-client/tests/generated/cancel_client.py b/python-client/tests/generated/cancel_client.py new file mode 100644 index 00000000..6ba531da --- /dev/null +++ b/python-client/tests/generated/cancel_client.py @@ -0,0 +1,167 @@ +"""Generated 
client for the cancel service.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) + +from ._types import ( + CancelBlockingRpcInit, + CancelBlockingRpcOutput, + CancelBlockingStreamInit, + CancelBlockingStreamInput, + CancelBlockingStreamOutput, + CancelBlockingSubscriptionInit, + CancelBlockingSubscriptionOutput, + CancelBlockingUploadInit, + CancelBlockingUploadInput, + CancelBlockingUploadOutput, + CancelCountedStreamInit, + CancelCountedStreamInput, + CancelCountedStreamOutput, + CancelImmediateRpcInit, + CancelImmediateRpcOutput, + CancelImmediateStreamInit, + CancelImmediateStreamInput, + CancelImmediateStreamOutput, + CancelImmediateSubscriptionInit, + CancelImmediateSubscriptionOutput, + CancelImmediateUploadInit, + CancelImmediateUploadInput, + CancelImmediateUploadOutput, +) + +from ._errors import ProtocolError + + +class CancelClient: + """Typed client for the ``cancel`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + async def blocking_rpc( + self, + init: CancelBlockingRpcInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[CancelBlockingRpcOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "cancel", + "blockingRpc", + init, + abort_signal=abort_signal, + ) + + def blocking_stream( + self, + init: CancelBlockingStreamInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[CancelBlockingStreamInput, OkResult[CancelBlockingStreamOutput] | ErrResult[ProtocolError]]: + return self._client.stream( + "cancel", + "blockingStream", + init, + abort_signal=abort_signal, + ) + + def blocking_upload( + self, + init: CancelBlockingUploadInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> UploadResult[CancelBlockingUploadInput, OkResult[CancelBlockingUploadOutput] | ErrResult[ProtocolError]]: + 
return self._client.upload( + "cancel", + "blockingUpload", + init, + abort_signal=abort_signal, + ) + + def blocking_subscription( + self, + init: CancelBlockingSubscriptionInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> SubscriptionResult[OkResult[CancelBlockingSubscriptionOutput] | ErrResult[ProtocolError]]: + return self._client.subscribe( + "cancel", + "blockingSubscription", + init, + abort_signal=abort_signal, + ) + + async def immediate_rpc( + self, + init: CancelImmediateRpcInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[CancelImmediateRpcOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "cancel", + "immediateRpc", + init, + abort_signal=abort_signal, + ) + + def immediate_stream( + self, + init: CancelImmediateStreamInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[CancelImmediateStreamInput, OkResult[CancelImmediateStreamOutput] | ErrResult[ProtocolError]]: + return self._client.stream( + "cancel", + "immediateStream", + init, + abort_signal=abort_signal, + ) + + def immediate_upload( + self, + init: CancelImmediateUploadInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> UploadResult[CancelImmediateUploadInput, OkResult[CancelImmediateUploadOutput] | ErrResult[ProtocolError]]: + return self._client.upload( + "cancel", + "immediateUpload", + init, + abort_signal=abort_signal, + ) + + def immediate_subscription( + self, + init: CancelImmediateSubscriptionInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> SubscriptionResult[OkResult[CancelImmediateSubscriptionOutput] | ErrResult[ProtocolError]]: + return self._client.subscribe( + "cancel", + "immediateSubscription", + init, + abort_signal=abort_signal, + ) + + def counted_stream( + self, + init: CancelCountedStreamInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[CancelCountedStreamInput, OkResult[CancelCountedStreamOutput] | ErrResult[ProtocolError]]: + return 
self._client.stream( + "cancel", + "countedStream", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/generated/conftest.py b/python-client/tests/generated/conftest.py new file mode 100644 index 00000000..37d9ad16 --- /dev/null +++ b/python-client/tests/generated/conftest.py @@ -0,0 +1 @@ +collect_ignore_glob = ["*"] diff --git a/python-client/tests/generated/fallible_client.py b/python-client/tests/generated/fallible_client.py new file mode 100644 index 00000000..72b1f465 --- /dev/null +++ b/python-client/tests/generated/fallible_client.py @@ -0,0 +1,61 @@ +"""Generated client for the fallible service.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) + +from ._types import ( + FallibleDivideErrorDivByZero, + FallibleDivideErrorInfinity, + FallibleDivideInit, + FallibleDivideOutput, + FallibleEchoError, + FallibleEchoInit, + FallibleEchoInput, + FallibleEchoOutput, +) + +from ._errors import ProtocolError + + +class FallibleClient: + """Typed client for the ``fallible`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + async def divide( + self, + init: FallibleDivideInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[FallibleDivideOutput] | ErrResult[FallibleDivideErrorDivByZero | FallibleDivideErrorInfinity | ProtocolError]: + return await self._client.rpc( + "fallible", + "divide", + init, + abort_signal=abort_signal, + ) + + def echo( + self, + init: FallibleEchoInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[FallibleEchoInput, OkResult[FallibleEchoOutput] | ErrResult[FallibleEchoError | ProtocolError]]: + return self._client.stream( + "fallible", + "echo", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/generated/ordering_client.py 
b/python-client/tests/generated/ordering_client.py new file mode 100644 index 00000000..eeb0b307 --- /dev/null +++ b/python-client/tests/generated/ordering_client.py @@ -0,0 +1,57 @@ +"""Generated client for the ordering service.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) + +from ._types import ( + OrderingAddInit, + OrderingAddOutput, + OrderingGetAllInit, + OrderingGetAllOutput, +) + +from ._errors import ProtocolError + + +class OrderingClient: + """Typed client for the ``ordering`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + async def add( + self, + init: OrderingAddInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[OrderingAddOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "ordering", + "add", + init, + abort_signal=abort_signal, + ) + + async def get_all( + self, + init: OrderingGetAllInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[OrderingGetAllOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "ordering", + "getAll", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/generated/subscribable_client.py b/python-client/tests/generated/subscribable_client.py new file mode 100644 index 00000000..dcc2207d --- /dev/null +++ b/python-client/tests/generated/subscribable_client.py @@ -0,0 +1,57 @@ +"""Generated client for the subscribable service.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) + +from ._types import ( + SubscribableAddInit, + SubscribableAddOutput, + SubscribableValueInit, + SubscribableValueOutput, +) + +from ._errors import ProtocolError + + +class SubscribableClient: 
+ """Typed client for the ``subscribable`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + async def add( + self, + init: SubscribableAddInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[SubscribableAddOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "subscribable", + "add", + init, + abort_signal=abort_signal, + ) + + def value( + self, + init: SubscribableValueInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> SubscriptionResult[OkResult[SubscribableValueOutput] | ErrResult[ProtocolError]]: + return self._client.subscribe( + "subscribable", + "value", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/generated/test_client.py b/python-client/tests/generated/test_client.py new file mode 100644 index 00000000..94099461 --- /dev/null +++ b/python-client/tests/generated/test_client.py @@ -0,0 +1,104 @@ +"""Generated client for the test service.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) + +from ._types import ( + TestAddInit, + TestAddOutput, + TestEchoBinaryInit, + TestEchoBinaryOutput, + TestEchoInit, + TestEchoInput, + TestEchoOutput, + TestEchoRecursiveInit, + TestEchoRecursiveOutput, + TestEchoWithPrefixInit, + TestEchoWithPrefixInput, + TestEchoWithPrefixOutput, +) + +from ._errors import ProtocolError + + +class TestClient: + """Typed client for the ``test`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + async def add( + self, + init: TestAddInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[TestAddOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "test", + "add", + init, + abort_signal=abort_signal, + ) + + def echo( + self, + init: TestEchoInit, + *, + abort_signal: asyncio.Event | None 
= None, + ) -> StreamResult[TestEchoInput, OkResult[TestEchoOutput] | ErrResult[ProtocolError]]: + return self._client.stream( + "test", + "echo", + init, + abort_signal=abort_signal, + ) + + def echo_with_prefix( + self, + init: TestEchoWithPrefixInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> StreamResult[TestEchoWithPrefixInput, OkResult[TestEchoWithPrefixOutput] | ErrResult[ProtocolError]]: + return self._client.stream( + "test", + "echoWithPrefix", + init, + abort_signal=abort_signal, + ) + + async def echo_binary( + self, + init: TestEchoBinaryInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[TestEchoBinaryOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "test", + "echoBinary", + init, + abort_signal=abort_signal, + ) + + async def echo_recursive( + self, + init: TestEchoRecursiveInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[TestEchoRecursiveOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "test", + "echoRecursive", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/generated/uploadable_client.py b/python-client/tests/generated/uploadable_client.py new file mode 100644 index 00000000..89ea12b5 --- /dev/null +++ b/python-client/tests/generated/uploadable_client.py @@ -0,0 +1,75 @@ +"""Generated client for the uploadable service.""" + +from __future__ import annotations + +import asyncio +from typing import Any + +from river.client import ( + ErrResult, + OkResult, + RiverClient, + StreamResult, + SubscriptionResult, + UploadResult, +) + +from ._types import ( + UploadableAddMultipleInit, + UploadableAddMultipleInput, + UploadableAddMultipleOutput, + UploadableAddMultipleWithPrefixInit, + UploadableAddMultipleWithPrefixInput, + UploadableAddMultipleWithPrefixOutput, + UploadableCancellableAddInit, + UploadableCancellableAddInput, + UploadableCancellableAddOutput, +) + +from ._errors import ProtocolError + + +class UploadableClient: + 
"""Typed client for the ``uploadable`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + def add_multiple( + self, + init: UploadableAddMultipleInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> UploadResult[UploadableAddMultipleInput, OkResult[UploadableAddMultipleOutput] | ErrResult[ProtocolError]]: + return self._client.upload( + "uploadable", + "addMultiple", + init, + abort_signal=abort_signal, + ) + + def add_multiple_with_prefix( + self, + init: UploadableAddMultipleWithPrefixInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> UploadResult[UploadableAddMultipleWithPrefixInput, OkResult[UploadableAddMultipleWithPrefixOutput] | ErrResult[ProtocolError]]: + return self._client.upload( + "uploadable", + "addMultipleWithPrefix", + init, + abort_signal=abort_signal, + ) + + def cancellable_add( + self, + init: UploadableCancellableAddInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> UploadResult[UploadableCancellableAddInput, OkResult[UploadableCancellableAddOutput] | ErrResult[ProtocolError]]: + return self._client.upload( + "uploadable", + "cancellableAdd", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/generated_v2/__init__.py b/python-client/tests/generated_v2/__init__.py new file mode 100644 index 00000000..3bd51d7a --- /dev/null +++ b/python-client/tests/generated_v2/__init__.py @@ -0,0 +1,11 @@ +"""Generated River service clients.""" + +from ._root_client import TestServer +from ._types import HandshakeSchema +from .test_client import TestClient + +__all__ = [ + "TestServer", + "HandshakeSchema", + "TestClient", +] diff --git a/python-client/tests/generated_v2/_errors.py b/python-client/tests/generated_v2/_errors.py new file mode 100644 index 00000000..ba3e8c37 --- /dev/null +++ b/python-client/tests/generated_v2/_errors.py @@ -0,0 +1,45 @@ +"""Protocol-level error types for the River protocol. 
+ +These errors can be returned by any procedure regardless of its +service-specific error schema. +""" + +from __future__ import annotations + +from typing import Literal + +from typing_extensions import NotRequired, TypedDict + + +class UncaughtError(TypedDict): + code: Literal["UNCAUGHT_ERROR"] + message: str + + +class UnexpectedDisconnect(TypedDict): + code: Literal["UNEXPECTED_DISCONNECT"] + message: str + + +class InvalidRequestExtrasItem(TypedDict): + path: str + message: str + + +class InvalidRequestExtras(TypedDict): + firstValidationErrors: list[InvalidRequestExtrasItem] + totalErrors: float + + +class InvalidRequest(TypedDict): + code: Literal["INVALID_REQUEST"] + message: str + extras: NotRequired[InvalidRequestExtras] + + +class Cancel(TypedDict): + code: Literal["CANCEL"] + message: str + + +ProtocolError = UncaughtError | UnexpectedDisconnect | InvalidRequest | Cancel diff --git a/python-client/tests/generated_v2/_root_client.py b/python-client/tests/generated_v2/_root_client.py new file mode 100644 index 00000000..fe144829 --- /dev/null +++ b/python-client/tests/generated_v2/_root_client.py @@ -0,0 +1,15 @@ +"""Generated root client aggregating all service clients.""" + +from __future__ import annotations + +from river.client import RiverClient + +from .test_client import TestClient + + +class TestServer: + """Aggregated client for all services.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + self.test = TestClient(client) diff --git a/python-client/tests/generated_v2/_types.py b/python-client/tests/generated_v2/_types.py new file mode 100644 index 00000000..acd1ebea --- /dev/null +++ b/python-client/tests/generated_v2/_types.py @@ -0,0 +1,17 @@ +"""Generated type definitions for River services.""" + +from __future__ import annotations + +from typing_extensions import TypedDict + + +class TestEchoInit(TypedDict): + msg: str + + +class TestEchoOutput(TypedDict): + response: str + + +class HandshakeSchema(TypedDict): 
+ token: str diff --git a/python-client/tests/generated_v2/test_client.py b/python-client/tests/generated_v2/test_client.py new file mode 100644 index 00000000..f92f2e6d --- /dev/null +++ b/python-client/tests/generated_v2/test_client.py @@ -0,0 +1,37 @@ +"""Generated client for the test service.""" + +from __future__ import annotations + +import asyncio + +from river.client import ( + ErrResult, + OkResult, + RiverClient, +) + +from ._errors import ProtocolError +from ._types import ( + TestEchoInit, + TestEchoOutput, +) + + +class TestClient: + """Typed client for the ``test`` service.""" + + def __init__(self, client: RiverClient) -> None: + self._client = client + + async def echo( + self, + init: TestEchoInit, + *, + abort_signal: asyncio.Event | None = None, + ) -> OkResult[TestEchoOutput] | ErrResult[ProtocolError]: + return await self._client.rpc( + "test", + "echo", + init, + abort_signal=abort_signal, + ) diff --git a/python-client/tests/test_codegen.py b/python-client/tests/test_codegen.py new file mode 100644 index 00000000..43ed1c10 --- /dev/null +++ b/python-client/tests/test_codegen.py @@ -0,0 +1,1665 @@ +"""Tests for River codegen pipeline. + +Tests the full pipeline: schema extraction → codegen → import → live usage. 
+""" + +from __future__ import annotations + +import json +import os +import sys + +import pytest + +TESTS_DIR = os.path.dirname(__file__) +SCHEMA_JSON = os.path.join(TESTS_DIR, "test_schema.json") +GENERATED_DIR = os.path.join(TESTS_DIR, "generated") + + +# --------------------------------------------------------------------------- +# Schema conversion tests +# --------------------------------------------------------------------------- + + +class TestSchemaConversion: + """Test JSON Schema → IR conversion.""" + + @pytest.fixture(autouse=True) + def _setup(self, generated_client_dir: str) -> None: + """Ensure codegen has run.""" + + def _load_schema(self) -> dict: + with open(SCHEMA_JSON) as f: + return json.load(f) + + def test_schema_has_services(self) -> None: + schema = self._load_schema() + assert "services" in schema + svc_names = set(schema["services"].keys()) + assert svc_names == { + "test", + "ordering", + "fallible", + "subscribable", + "uploadable", + "cancel", + } + + def test_converter_produces_ir(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + svc_names = {s.name for s in ir.services} + assert svc_names == { + "test", + "ordering", + "fallible", + "subscribable", + "uploadable", + "cancel", + } + + def test_test_service_procedures(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + test_svc = next(s for s in ir.services if s.name == "test") + proc_names = {p.name for p in test_svc.procedures} + assert "add" in proc_names + assert "echo" in proc_names + assert "echoWithPrefix" in proc_names + + def test_procedure_types(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + test_svc = next(s for s in 
ir.services if s.name == "test") + procs = {p.name: p for p in test_svc.procedures} + + assert procs["add"].proc_type == "rpc" + assert procs["echo"].proc_type == "stream" + assert procs["echoWithPrefix"].proc_type == "stream" + + def test_snake_case_method_names(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + test_svc = next(s for s in ir.services if s.name == "test") + procs = {p.name: p for p in test_svc.procedures} + + assert procs["echoWithPrefix"].py_name == "echo_with_prefix" + assert procs["add"].py_name == "add" + + def test_typedicts_generated(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + td_names = {td.name for td in ir.typedicts} + assert "TestAddInit" in td_names + assert "TestEchoInit" in td_names + assert "TestEchoInput" in td_names + assert "TestEchoWithPrefixInit" in td_names + + def test_fallible_service_errors(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + fallible_svc = next(s for s in ir.services if s.name == "fallible") + divide_proc = next(p for p in fallible_svc.procedures if p.name == "divide") + + # Should have service-specific errors + assert divide_proc.error_type is not None + assert "DivByZero" in divide_proc.error_type.annotation + assert "Infinity" in divide_proc.error_type.annotation + + def test_upload_procedures(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + upload_svc = next(s for s in ir.services if s.name == "uploadable") + procs = {p.name: p for p in upload_svc.procedures} + + assert procs["addMultiple"].proc_type == "upload" + assert 
procs["addMultiple"].input_type is not None + + def test_subscription_procedures(self) -> None: + from river.codegen.schema import SchemaConverter + + schema = self._load_schema() + converter = SchemaConverter() + ir = converter.convert(schema) + + sub_svc = next(s for s in ir.services if s.name == "subscribable") + procs = {p.name: p for p in sub_svc.procedures} + + assert procs["value"].proc_type == "subscription" + assert procs["value"].input_type is None + + +# --------------------------------------------------------------------------- +# Generated code import tests +# --------------------------------------------------------------------------- + + +class TestGeneratedImports: + """Test that generated code can be imported.""" + + @pytest.fixture(autouse=True) + def _setup(self, generated_client_dir: str) -> None: + """Ensure codegen has run and generated dir is on sys.path.""" + if TESTS_DIR not in sys.path: + sys.path.insert(0, TESTS_DIR) + + def test_import_init(self) -> None: + import generated + + assert hasattr(generated, "TestClient") + assert hasattr(generated, "FallibleClient") + assert hasattr(generated, "UploadableClient") + assert hasattr(generated, "SubscribableClient") + assert hasattr(generated, "OrderingClient") + assert hasattr(generated, "CancelClient") + + def test_import_types(self) -> None: + from generated._types import ( + TestAddInit, + TestEchoInit, + TestEchoInput, + TestEchoWithPrefixInit, + ) + + # TypedDicts should be classes + assert isinstance(TestAddInit, type) + assert isinstance(TestEchoInit, type) + assert isinstance(TestEchoInput, type) + assert isinstance(TestEchoWithPrefixInit, type) + + def test_import_errors(self) -> None: + from generated._errors import ( + Cancel, + InvalidRequest, + UncaughtError, + UnexpectedDisconnect, + ) + + assert isinstance(UncaughtError, type) + assert isinstance(UnexpectedDisconnect, type) + assert isinstance(InvalidRequest, type) + assert isinstance(Cancel, type) + + def 
test_client_class_has_methods(self) -> None: + from generated import TestClient + + assert hasattr(TestClient, "add") + assert hasattr(TestClient, "echo") + assert hasattr(TestClient, "echo_with_prefix") + + def test_fallible_client_has_methods(self) -> None: + from generated import FallibleClient + + assert hasattr(FallibleClient, "divide") + assert hasattr(FallibleClient, "echo") + + def test_uploadable_client_has_methods(self) -> None: + from generated import UploadableClient + + assert hasattr(UploadableClient, "add_multiple") + assert hasattr(UploadableClient, "add_multiple_with_prefix") + assert hasattr(UploadableClient, "cancellable_add") + + def test_subscribable_client_has_methods(self) -> None: + from generated import SubscribableClient + + assert hasattr(SubscribableClient, "add") + assert hasattr(SubscribableClient, "value") + + +# --------------------------------------------------------------------------- +# Live test server integration tests +# --------------------------------------------------------------------------- + + +class TestGeneratedClientsLive: + """Test generated proxy clients against the live test server.""" + + @pytest.fixture(autouse=True) + def _setup(self, generated_client_dir: str) -> None: + if TESTS_DIR not in sys.path: + sys.path.insert(0, TESTS_DIR) + + async def _make_client(self, server_url: str): + from river import ( + BinaryCodec, + RiverClient, + WebSocketClientTransport, + ) + + transport = WebSocketClientTransport( + server_url, + client_id="test-codegen-client", + server_id="SERVER", + codec=BinaryCodec(), + ) + client = RiverClient(transport, server_id="SERVER") + return client, transport + + async def test_rpc_via_generated_client(self, server_url: str) -> None: + from generated import TestClient + + client, transport = await self._make_client(server_url) + try: + test = TestClient(client) + result = await test.add({"n": 0}) + assert result["ok"] is True + assert isinstance(result["payload"]["result"], (int, float)) + 
finally: + await transport.close() + + async def test_stream_via_generated_client(self, server_url: str) -> None: + from generated import TestClient + + client, transport = await self._make_client(server_url) + try: + test = TestClient(client) + stream = test.echo({}) + + stream.req_writable.write({"msg": "hello", "ignore": False}) + stream.req_writable.write({"msg": "world", "ignore": False}) + stream.req_writable.close() + + messages = [] + async for msg in stream.res_readable: + if msg.get("ok"): + messages.append(msg["payload"]["response"]) + + assert "hello" in messages + assert "world" in messages + finally: + await transport.close() + + async def test_stream_with_prefix_via_generated_client( + self, server_url: str + ) -> None: + from generated import TestClient + + client, transport = await self._make_client(server_url) + try: + test = TestClient(client) + stream = test.echo_with_prefix({"prefix": ">>>"}) + + stream.req_writable.write({"msg": "test", "ignore": False}) + stream.req_writable.close() + + messages = [] + async for msg in stream.res_readable: + if msg.get("ok"): + messages.append(msg["payload"]["response"]) + + assert len(messages) == 1 + assert messages[0] == ">>> test" + finally: + await transport.close() + + async def test_upload_via_generated_client(self, server_url: str) -> None: + from generated import UploadableClient + + client, transport = await self._make_client(server_url) + try: + upload_client = UploadableClient(client) + upload = upload_client.add_multiple({}) + + upload.req_writable.write({"n": 1}) + upload.req_writable.write({"n": 2}) + upload.req_writable.write({"n": 3}) + upload.req_writable.close() + + result = await upload.finalize() + assert result["ok"] is True + assert result["payload"]["result"] == 6 + finally: + await transport.close() + + async def test_subscription_via_generated_client(self, server_url: str) -> None: + from generated import SubscribableClient + + client, transport = await self._make_client(server_url) 
+ try: + sub_client = SubscribableClient(client) + sub = sub_client.value({}) + + # Get the initial value + done, msg = await sub.res_readable.next() + assert not done + assert msg is not None + assert msg["ok"] is True + assert "count" in msg["payload"] + + sub.res_readable.break_() + finally: + await transport.close() + + async def test_fallible_rpc_success(self, server_url: str) -> None: + from generated import FallibleClient + + client, transport = await self._make_client(server_url) + try: + fallible = FallibleClient(client) + result = await fallible.divide({"a": 10, "b": 2}) + assert result["ok"] is True + assert result["payload"]["result"] == 5.0 + finally: + await transport.close() + + async def test_fallible_rpc_error(self, server_url: str) -> None: + from generated import FallibleClient + + client, transport = await self._make_client(server_url) + try: + fallible = FallibleClient(client) + result = await fallible.divide({"a": 10, "b": 0}) + assert result["ok"] is False + assert result["payload"]["code"] == "DIV_BY_ZERO" + finally: + await transport.close() + + +class TestCodegenFieldNames: + """Codegen field name validation tests.""" + + def test_keyword_field_normalized(self): + """Python keywords get an underscore suffix.""" + from river.codegen.schema import _safe_field_name + + assert _safe_field_name("from") == "from_" + assert _safe_field_name("class") == "class_" + assert _safe_field_name("import") == "import_" + + def test_normal_field_unchanged(self): + from river.codegen.schema import _safe_field_name + + assert _safe_field_name("name") == "name" + assert _safe_field_name("streamId") == "streamId" + + def test_underscore_prefixed_field_accepted(self): + """Underscore-prefixed fields like _id are valid Python identifiers. + + Regression: _sanitize_identifier stripped leading underscores, + causing _safe_field_name to reject valid fields like '_id'. 
+ """ + from river.codegen.schema import _safe_field_name + + assert _safe_field_name("_id") == "_id" + assert _safe_field_name("_private") == "_private" + + def test_dunder_field_normalized(self): + """Double-underscore-prefixed fields get an extra underscore prefix.""" + from river.codegen.schema import _safe_field_name + + assert _safe_field_name("__dunder") == "___dunder" + assert _safe_field_name("__private") == "___private" + # Dunder methods (ending with __) are NOT mangled + assert _safe_field_name("__init__") == "__init__" + + def test_schema_with_underscore_prefixed_field(self): + """Schemas with underscore-prefixed properties generate correctly.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = { + "type": "object", + "properties": { + "_id": {"type": "string"}, + "name": {"type": "string"}, + }, + "required": ["_id", "name"], + } + ref = converter._schema_to_typeref(schema, "Doc") + assert ref.annotation == "Doc" + td = converter._typedicts[-1] + field_names = [f.name for f in td.fields] + assert "_id" in field_names + assert "name" in field_names + + def test_dash_field_normalized(self): + """Fields with dashes are normalized.""" + from river.codegen.schema import _safe_field_name + + assert _safe_field_name("request-id") == "request_id" + + def test_dollar_field_normalized(self): + """Fields with dollar signs are normalized.""" + from river.codegen.schema import _safe_field_name + + assert _safe_field_name("$kind") == "_kind" + + def test_schema_with_invalid_field_normalized(self): + """Codegen normalizes non-identifier property names.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = { + "type": "object", + "properties": { + "request-id": {"type": "string"}, + "normal": {"type": "string"}, + }, + "required": ["request-id", "normal"], + } + ref = converter._schema_to_typeref(schema, "TestObj") + assert ref.annotation == "TestObj" + td = 
converter._typedicts[-1] + field_names = [f.name for f in td.fields] + assert "request_id" in field_names + assert "normal" in field_names + + def test_schema_with_keyword_field_normalized(self): + """Codegen normalizes keyword property names.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = { + "type": "object", + "properties": { + "from": {"type": "string"}, + }, + "required": ["from"], + } + ref = converter._schema_to_typeref(schema, "TestObj") + assert ref.annotation == "TestObj" + td = converter._typedicts[-1] + assert td.fields[0].name == "from_" + + def test_collision_raises(self): + """Codegen raises when two properties normalize to the same name.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = { + "type": "object", + "properties": { + "$kind": {"type": "string"}, + "_kind": {"type": "string"}, + }, + "required": ["$kind", "_kind"], + } + with pytest.raises(ValueError, match="both normalize to"): + converter._schema_to_typeref(schema, "TestObj") + + def test_valid_schema_passes(self): + """Schemas with normal camelCase properties work fine.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = { + "type": "object", + "properties": { + "userId": {"type": "string"}, + "count": {"type": "number"}, + }, + "required": ["userId", "count"], + } + ref = converter._schema_to_typeref(schema, "TestObj") + assert ref.annotation == "TestObj" + td = converter._typedicts[-1] + assert [f.name for f in td.fields] == ["userId", "count"] + + +class TestNameCollisions: + """Codegen detects and rejects name collisions.""" + + def test_procedure_name_collision_raises(self): + """Two procedures that collide (method name or TypedDict) are rejected.""" + from river.codegen.schema import SchemaConverter + + raw = { + "services": { + "svc": { + "procedures": { + "fooBar": { + "type": "rpc", + "init": {"type": "object", "properties": {}}, + 
"output": {"type": "object", "properties": {}}, + }, + "foo_bar": { + "type": "rpc", + "init": {"type": "object", "properties": {}}, + "output": {"type": "object", "properties": {}}, + }, + } + } + } + } + converter = SchemaConverter() + with pytest.raises(ValueError): + converter.convert(raw) + + def test_service_module_collision_raises(self): + """Two services that map to the same module name are rejected.""" + from river.codegen.schema import SchemaConverter + + raw = { + "services": { + "foo-bar": { + "procedures": {}, + }, + "foo_bar": { + "procedures": {}, + }, + } + } + converter = SchemaConverter() + with pytest.raises(ValueError, match="foo_bar"): + converter.convert(raw) + + def test_no_collision_passes(self): + """Distinct names that don't collide work fine.""" + from river.codegen.schema import SchemaConverter + + raw = { + "services": { + "alpha": { + "procedures": { + "doX": { + "type": "rpc", + "init": {"type": "object", "properties": {}}, + "output": {"type": "object", "properties": {}}, + }, + "doY": { + "type": "rpc", + "init": {"type": "object", "properties": {}}, + "output": {"type": "object", "properties": {}}, + }, + } + }, + "beta": { + "procedures": {}, + }, + } + } + converter = SchemaConverter() + ir = converter.convert(raw) + assert len(ir.services) == 2 + + def test_service_class_name_collision_raises(self): + """Two services that map to the same class name are rejected.""" + from river.codegen.schema import SchemaConverter + + raw = { + "services": { + "foo_bar": {"procedures": {}}, + "FooBar": {"procedures": {}}, + } + } + converter = SchemaConverter() + with pytest.raises(ValueError, match="FooBarClient"): + converter.convert(raw) + + def test_description_with_triple_quotes(self): + """Descriptions containing triple quotes are escaped in output.""" + from river.codegen.emitter import _escape_docstring + + assert '"""' not in _escape_docstring('bad """ doc') + # Internal triple quotes are escaped; trailing " also escaped + result = 
_escape_docstring('say """hello"""') + assert '"""' not in result + assert not result.endswith('"') or result.endswith(r'\"') + + def test_description_ending_with_quote(self): + """Trailing quote is escaped to avoid merging with closing triple-quote.""" + from river.codegen.emitter import _escape_docstring + + result = _escape_docstring('example: "hello"') + # Must not end with unescaped " which would form """" with closing """ + assert not result.endswith('"') or result.endswith(r'\"') + assert result == r'example: "hello\"' + + def test_description_ending_without_quote(self): + """Non-quote endings are left unchanged.""" + from river.codegen.emitter import _escape_docstring + + assert _escape_docstring("normal text") == "normal text" + + def test_typedict_name_collision_deduplicates(self): + """Two properties that generate the same TypedDict name — first wins.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = { + "type": "object", + "properties": { + "fooBar": { + "type": "object", + "properties": {"a": {"type": "string"}}, + }, + "FooBar": { + "type": "object", + "properties": {"b": {"type": "number"}}, + }, + }, + } + converter._schema_to_typeref(schema, "Prefix") + matching = [td for td in converter._typedicts if td.name == "PrefixFooBar"] + assert len(matching) == 1 + # First definition wins + assert matching[0].fields[0].name == "a" + + def test_empty_anyof_is_never(self): + """anyOf with zero variants → Never.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + ref = converter._schema_to_typeref({"anyOf": []}, "X") + assert ref.annotation == "Never" + + +# --------------------------------------------------------------------------- +# Complex type tests +# --------------------------------------------------------------------------- + + +class TestComplexTypes: + """Test codegen with complex JSON Schema types.""" + + def _convert(self, schema: dict, name: str = "Test"): + 
from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + ref = converter._schema_to_typeref(schema, name) + return ref, converter._typedicts + + # -- Deeply nested objects -- + + def test_deeply_nested_objects(self): + """Objects nested 4 levels deep get path-derived names.""" + schema = { + "type": "object", + "properties": { + "level1": { + "type": "object", + "properties": { + "level2": { + "type": "object", + "properties": { + "level3": { + "type": "object", + "properties": { + "value": {"type": "string"}, + }, + "required": ["value"], + } + }, + "required": ["level3"], + } + }, + "required": ["level2"], + } + }, + "required": ["level1"], + } + ref, tds = self._convert(schema, "Root") + assert ref.annotation == "Root" + + td_names = [td.name for td in tds] + assert "Root" in td_names + assert "RootLevel1" in td_names + assert "RootLevel1Level2" in td_names + assert "RootLevel1Level2Level3" in td_names + + # Innermost TypedDict has the value field + innermost = next(td for td in tds if td.name == "RootLevel1Level2Level3") + assert len(innermost.fields) == 1 + assert innermost.fields[0].name == "value" + assert innermost.fields[0].type_ref.annotation == "str" + + def test_nested_object_in_array(self): + """Array of objects creates a TypedDict for the item type.""" + schema = { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "number"}, + "name": {"type": "string"}, + }, + "required": ["id", "name"], + }, + } + ref, tds = self._convert(schema, "ItemList") + assert ref.annotation == "list[ItemListItem]" + assert any(td.name == "ItemListItem" for td in tds) + + def test_nested_array_of_arrays(self): + """Nested arrays: list[list[str]].""" + schema = { + "type": "array", + "items": { + "type": "array", + "items": {"type": "string"}, + }, + } + ref, _ = self._convert(schema, "Matrix") + assert ref.annotation == "list[list[str]]" + + # -- Union types (anyOf) -- + + def 
test_discriminated_union_with_code_field(self): + """anyOf with const code fields → named TypedDicts.""" + schema = { + "anyOf": [ + { + "type": "object", + "properties": { + "code": {"const": "SUCCESS"}, + "data": {"type": "string"}, + }, + "required": ["code", "data"], + }, + { + "type": "object", + "properties": { + "code": {"const": "FAILURE"}, + "reason": {"type": "string"}, + }, + "required": ["code", "reason"], + }, + ] + } + ref, tds = self._convert(schema, "Result") + assert "ResultSuccess" in ref.annotation + assert "ResultFailure" in ref.annotation + assert "|" in ref.annotation + + td_names = {td.name for td in tds} + assert "ResultSuccess" in td_names + assert "ResultFailure" in td_names + + def test_non_discriminated_union_objects(self): + """anyOf with objects but no const code → indexed variant names.""" + schema = { + "anyOf": [ + { + "type": "object", + "properties": {"x": {"type": "number"}}, + "required": ["x"], + }, + { + "type": "object", + "properties": {"y": {"type": "string"}}, + "required": ["y"], + }, + ] + } + ref, tds = self._convert(schema, "Point") + # Without code or description, should get Variant0/Variant1 + assert "PointVariant0" in ref.annotation + assert "PointVariant1" in ref.annotation + + def test_union_mixed_types_primitives_and_objects(self): + """anyOf mixing primitives and objects.""" + schema = { + "anyOf": [ + {"type": "string"}, + {"type": "number"}, + { + "type": "object", + "properties": {"value": {"type": "boolean"}}, + "required": ["value"], + }, + ] + } + ref, tds = self._convert(schema, "Mixed") + # Should include str, float, and a TypedDict + assert "str" in ref.annotation + assert "float" in ref.annotation + assert "MixedVariant2" in ref.annotation + assert any(td.name == "MixedVariant2" for td in tds) + + def test_union_with_null(self): + """anyOf with null → includes None in union.""" + schema = { + "anyOf": [ + {"type": "string"}, + {"type": "null"}, + ] + } + ref, _ = self._convert(schema, "Nullable") + 
assert "str" in ref.annotation + assert "None" in ref.annotation + + def test_union_primitives_only(self): + """anyOf with only primitives → no TypedDicts created.""" + schema = { + "anyOf": [ + {"type": "string"}, + {"type": "number"}, + {"type": "boolean"}, + ] + } + ref, tds = self._convert(schema, "Prim") + assert ref.annotation == "str | float | bool" + # No TypedDicts should be created for primitives + assert len(tds) == 0 + + def test_single_variant_anyof_unwrapped(self): + """anyOf with a single variant is unwrapped.""" + schema = { + "anyOf": [ + {"type": "string"}, + ] + } + ref, _ = self._convert(schema, "Single") + assert ref.annotation == "str" + + def test_union_with_description_variants(self): + """anyOf variants with descriptions use them for names.""" + schema = { + "anyOf": [ + { + "description": "Circle", + "type": "object", + "properties": {"radius": {"type": "number"}}, + "required": ["radius"], + }, + { + "description": "Rectangle", + "type": "object", + "properties": { + "width": {"type": "number"}, + "height": {"type": "number"}, + }, + "required": ["width", "height"], + }, + ] + } + ref, tds = self._convert(schema, "Shape") + assert "ShapeCircle" in ref.annotation + assert "ShapeRectangle" in ref.annotation + + # -- Recursive / self-referencing schemas -- + + def test_recursive_ref_with_id(self): + """$id/$ref pair → forward reference by name.""" + schema = { + "$id": "T0", + "type": "object", + "properties": { + "n": {"type": "number"}, + "next": {"$ref": "T0"}, + }, + "required": ["n"], + } + ref, tds = self._convert(schema, "TreeNode") + assert ref.annotation == "TreeNode" + td = next(td for td in tds if td.name == "TreeNode") + next_field = next(f for f in td.fields if f.name == "next") + # Should be a forward reference to itself, not Any + assert next_field.type_ref.annotation == "TreeNode" + + def test_recursive_ref_in_array(self): + """Recursive type used as array items.""" + schema = { + "$id": "Node", + "type": "object", + 
"properties": { + "value": {"type": "string"}, + "children": { + "type": "array", + "items": {"$ref": "Node"}, + }, + }, + "required": ["value"], + } + ref, tds = self._convert(schema, "TreeNode") + assert ref.annotation == "TreeNode" + td = next(td for td in tds if td.name == "TreeNode") + children_field = next(f for f in td.fields if f.name == "children") + assert children_field.type_ref.annotation == "list[TreeNode]" + + def test_unknown_ref_is_never(self): + """$ref to an unknown $id → Never (broken schema).""" + schema = {"$ref": "NonExistent"} + ref, _ = self._convert(schema, "X") + assert ref.annotation == "Never" + + def test_multiple_recursive_types(self): + """Two independent recursive types don't collide.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + + schema_a = { + "$id": "A", + "type": "object", + "properties": { + "val": {"type": "number"}, + "link": {"$ref": "A"}, + }, + "required": ["val"], + } + schema_b = { + "$id": "B", + "type": "object", + "properties": { + "name": {"type": "string"}, + "parent": {"$ref": "B"}, + }, + "required": ["name"], + } + + ref_a = converter._schema_to_typeref(schema_a, "LinkedList") + ref_b = converter._schema_to_typeref(schema_b, "Category") + + assert ref_a.annotation == "LinkedList" + assert ref_b.annotation == "Category" + + tds = converter._typedicts + ll = next(td for td in tds if td.name == "LinkedList") + cat = next(td for td in tds if td.name == "Category") + + link_field = next(f for f in ll.fields if f.name == "link") + assert link_field.type_ref.annotation == "LinkedList" + + parent_field = next(f for f in cat.fields if f.name == "parent") + assert parent_field.type_ref.annotation == "Category" + + # -- Const values -- + + def test_const_string(self): + ref, _ = self._convert({"const": "hello"}, "X") + assert ref.annotation == 'Literal["hello"]' + + def test_const_number(self): + ref, _ = self._convert({"const": 42}, "X") + assert ref.annotation == "Literal[42]" + 
+ def test_const_boolean(self): + ref, _ = self._convert({"const": True}, "X") + assert ref.annotation == "Literal[True]" + + def test_const_string_with_special_chars(self): + """Const strings with quotes/backslashes are properly escaped.""" + ref, _ = self._convert({"const": 'say "hello"'}, "X") + assert "Literal[" in ref.annotation + # Should be valid Python — no unescaped quotes + assert ref.annotation.count('"') % 2 == 0 or '\\"' in ref.annotation + + # -- Edge cases -- + + def test_empty_object(self): + """Object with no properties → TypedDict with pass.""" + schema = {"type": "object", "properties": {}} + ref, tds = self._convert(schema, "Empty") + assert ref.annotation == "Empty" + td = next(td for td in tds if td.name == "Empty") + assert len(td.fields) == 0 + + def test_object_all_optional_fields(self): + """Object with no required fields → all NotRequired.""" + schema = { + "type": "object", + "properties": { + "a": {"type": "string"}, + "b": {"type": "number"}, + }, + # no "required" key + } + ref, tds = self._convert(schema, "Opts") + td = next(td for td in tds if td.name == "Opts") + assert all(not f.required for f in td.fields) + + def test_object_mixed_required_optional(self): + """Object with some required, some optional fields.""" + schema = { + "type": "object", + "properties": { + "id": {"type": "number"}, + "name": {"type": "string"}, + "email": {"type": "string"}, + }, + "required": ["id"], + } + ref, tds = self._convert(schema, "User") + td = next(td for td in tds if td.name == "User") + field_map = {f.name: f for f in td.fields} + assert field_map["id"].required is True + assert field_map["name"].required is False + assert field_map["email"].required is False + + def test_unknown_type_falls_back_to_any(self): + """Unrecognized type string → Any.""" + ref, _ = self._convert({"type": "foobar"}, "X") + assert ref.annotation == "Any" + + def test_no_type_no_anyof_no_const_falls_back_to_any(self): + """Schema with no recognizable keys → Any.""" + 
ref, _ = self._convert({"description": "mystery"}, "X") + assert ref.annotation == "Any" + + def test_non_dict_schema_falls_back_to_any(self): + """Non-dict passed as schema → Any.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + ref = converter._schema_to_typeref("not a dict", "X") # type: ignore[arg-type] + assert ref.annotation == "Any" + + def test_array_with_no_items(self): + """Array with no items key → list[Any].""" + ref, _ = self._convert({"type": "array"}, "X") + assert ref.annotation == "list[Any]" + + def test_all_primitive_types(self): + """All primitive JSON Schema types map correctly.""" + cases = { + "string": "str", + "number": "float", + "integer": "int", + "boolean": "bool", + "null": "None", + "Uint8Array": "bytes", + } + for json_type, py_type in cases.items(): + ref, _ = self._convert({"type": json_type}, "X") + assert ref.annotation == py_type, f"Failed for {json_type}" + + # -- allOf (intersection) -- + + def test_allof_merges_object_properties(self): + """allOf with objects → merged TypedDict.""" + schema = { + "allOf": [ + { + "type": "object", + "properties": { + "a": {"type": "string"}, + "b": {"type": "number"}, + }, + "required": ["a", "b"], + }, + { + "type": "object", + "properties": { + "c": {"type": "boolean"}, + }, + "required": ["c"], + }, + ] + } + ref, tds = self._convert(schema, "Merged") + assert ref.annotation == "Merged" + td = next(td for td in tds if td.name == "Merged") + field_map = {f.name: f for f in td.fields} + assert field_map["a"].type_ref.annotation == "str" + assert field_map["b"].type_ref.annotation == "float" + assert field_map["c"].type_ref.annotation == "bool" + assert all(f.required for f in td.fields) + + def test_allof_with_type_object_wrapper(self): + """TypeBox emits {type: 'object', allOf: [...]} — both forms work.""" + schema = { + "type": "object", + "allOf": [ + { + "type": "object", + "properties": {"x": {"type": "number"}}, + "required": ["x"], + }, + { + 
"type": "object", + "properties": {"y": {"type": "number"}}, + "required": ["y"], + }, + ], + } + ref, tds = self._convert(schema, "Point") + assert ref.annotation == "Point" + td = next(td for td in tds if td.name == "Point") + assert {f.name for f in td.fields} == {"x", "y"} + + def test_allof_overlapping_fields(self): + """Overlapping properties in allOf → last definition wins.""" + schema = { + "allOf": [ + { + "type": "object", + "properties": { + "id": {"type": "string"}, + "name": {"type": "string"}, + }, + "required": ["id", "name"], + }, + { + "type": "object", + "properties": { + "name": {"type": "string"}, + "age": {"type": "number"}, + }, + "required": ["name"], + }, + ] + } + ref, tds = self._convert(schema, "Person") + assert ref.annotation == "Person" + td = next(td for td in tds if td.name == "Person") + field_map = {f.name: f for f in td.fields} + # "id" required from first, "name" required from both, "age" optional + assert field_map["id"].required is True + assert field_map["name"].required is True + assert field_map["age"].required is False + + def test_allof_with_nested_objects(self): + """allOf variants can contain nested objects.""" + schema = { + "allOf": [ + { + "type": "object", + "properties": { + "meta": { + "type": "object", + "properties": {"version": {"type": "number"}}, + "required": ["version"], + } + }, + "required": ["meta"], + }, + { + "type": "object", + "properties": { + "data": {"type": "string"}, + }, + "required": ["data"], + }, + ] + } + ref, tds = self._convert(schema, "Envelope") + assert ref.annotation == "Envelope" + td_names = {td.name for td in tds} + assert "Envelope" in td_names + assert "EnvelopeMeta" in td_names + + def test_allof_mixed_types_is_never(self): + """allOf with object + primitive → Never (contradictory).""" + schema = { + "allOf": [ + { + "type": "object", + "properties": {"x": {"type": "number"}}, + "required": ["x"], + }, + {"type": "string"}, + ] + } + ref, tds = self._convert(schema, "Mixed") + 
assert ref.annotation == "Never" + + def test_allof_only_primitives_is_never(self): + """allOf with only primitives → Never (contradictory intersection).""" + schema = { + "allOf": [ + {"type": "string"}, + {"type": "number"}, + ] + } + ref, _ = self._convert(schema, "Weird") + assert ref.annotation == "Never" + + def test_allof_empty_is_never(self): + """allOf with no variants → Never.""" + schema = {"allOf": []} + ref, _ = self._convert(schema, "Empty") + assert ref.annotation == "Never" + + # -- Full service schema with complex types -- + + def test_service_with_complex_types(self): + """Full service schema with unions, nested objects, arrays.""" + from river.codegen.schema import SchemaConverter + + raw = { + "services": { + "complex": { + "procedures": { + "transform": { + "type": "rpc", + "init": { + "type": "object", + "properties": { + "input": { + "anyOf": [ + {"type": "string"}, + {"type": "number"}, + { + "type": "object", + "properties": { + "nested": { + "type": "object", + "properties": { + "deep": {"type": "boolean"} + }, + "required": ["deep"], + } + }, + "required": ["nested"], + }, + ] + }, + "tags": { + "type": "array", + "items": { + "type": "object", + "properties": { + "key": {"type": "string"}, + "value": {"type": "string"}, + }, + "required": ["key", "value"], + }, + }, + }, + "required": ["input"], + }, + "output": { + "type": "object", + "properties": { + "result": {"type": "string"}, + }, + "required": ["result"], + }, + "errors": { + "anyOf": [ + { + "properties": { + "code": {"const": "UNCAUGHT_ERROR"}, + "message": {"type": "string"}, + }, + "required": ["code", "message"], + "type": "object", + }, + { + "properties": { + "code": {"const": "UNEXPECTED_DISCONNECT"}, + "message": {"type": "string"}, + }, + "required": ["code", "message"], + "type": "object", + }, + { + "properties": { + "code": {"const": "INVALID_REQUEST"}, + "message": {"type": "string"}, + }, + "required": ["code", "message"], + "type": "object", + }, + { + 
"properties": { + "code": {"const": "CANCEL"}, + "message": {"type": "string"}, + }, + "required": ["code", "message"], + "type": "object", + }, + { + "properties": { + "code": {"const": "TRANSFORM_FAILED"}, + "message": {"type": "string"}, + "details": { + "type": "object", + "properties": { + "field": {"type": "string"}, + "reason": {"type": "string"}, + }, + "required": ["field", "reason"], + }, + }, + "required": ["code", "message"], + "type": "object", + }, + ] + }, + } + } + } + } + } + + converter = SchemaConverter() + ir = converter.convert(raw) + + assert len(ir.services) == 1 + svc = ir.services[0] + assert svc.name == "complex" + assert len(svc.procedures) == 1 + + proc = svc.procedures[0] + assert proc.name == "transform" + assert proc.py_name == "transform" + + # Init should have created TypedDicts for nested objects + td_names = {td.name for td in ir.typedicts} + assert "ComplexTransformInit" in td_names + assert "ComplexTransformOutput" in td_names + + # The service error should be extracted (TRANSFORM_FAILED is the + # only non-protocol error, so it gets the unsuffixed name) + assert proc.error_type is not None + assert proc.error_type.annotation == "ComplexTransformError" + + # The union input field → str | float | TypedDict + init_td = next(td for td in ir.typedicts if td.name == "ComplexTransformInit") + input_field = next(f for f in init_td.fields if f.name == "input") + assert "str" in input_field.type_ref.annotation + assert "float" in input_field.type_ref.annotation + + # Tags array of objects + tags_field = next((f for f in init_td.fields if f.name == "tags"), None) + assert tags_field is not None + assert "list[" in tags_field.type_ref.annotation + + +class TestPatternProperties: + """Test codegen handling of patternProperties (dynamic dict keys).""" + + def _convert(self, schema: dict, name: str = "Test"): + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + ref = converter._schema_to_typeref(schema, name) 
+ return ref, converter._typedicts + + def test_simple_string_values(self): + """patternProperties with string values → dict[str, str].""" + schema = { + "type": "object", + "patternProperties": { + "^(.*)$": {"type": "string"}, + }, + } + ref, tds = self._convert(schema, "Env") + assert ref.annotation == "dict[str, str]" + assert len(tds) == 0 # no TypedDict emitted + + def test_object_values(self): + """patternProperties with object values → dict[str, TypedDict].""" + schema = { + "type": "object", + "patternProperties": { + "^(.*)$": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "version": {"type": "string"}, + }, + "required": ["name", "version"], + }, + }, + } + ref, tds = self._convert(schema, "Packages") + assert ref.annotation == "dict[str, PackagesValue]" + assert len(tds) == 1 + td = tds[0] + assert td.name == "PackagesValue" + field_names = {f.name for f in td.fields} + assert field_names == {"name", "version"} + + def test_nested_object_values(self): + """patternProperties where values have nested structure.""" + schema = { + "type": "object", + "patternProperties": { + "^(.*)$": { + "type": "object", + "properties": { + "all": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "version": {"type": "string"}, + }, + "required": ["name", "version"], + }, + }, + "required": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "version": {"type": "string"}, + }, + "required": ["name", "version"], + }, + }, + }, + }, + }, + } + ref, tds = self._convert(schema, "InstalledPackages") + assert ref.annotation == "dict[str, InstalledPackagesValue]" + + value_td = next(td for td in tds if td.name == "InstalledPackagesValue") + field_names = {f.name for f in value_td.fields} + assert "all" in field_names + assert "required" in field_names + + all_field = next(f for f in value_td.fields if f.name == "all") + assert "list[" in 
all_field.type_ref.annotation + + def test_integer_pattern_keys(self): + """patternProperties with integer-only pattern still becomes dict[str, ...].""" + schema = { + "type": "object", + "patternProperties": { + "^(0|[1-9][0-9]*)$": { + "type": "object", + "properties": {"id": {"type": "number"}}, + "required": ["id"], + }, + }, + } + ref, tds = self._convert(schema, "IntMap") + assert ref.annotation == "dict[str, IntMapValue]" + + def test_multiple_patterns_union(self): + """Multiple patternProperties produce a union value type.""" + schema = { + "type": "object", + "patternProperties": { + "^a": {"type": "string"}, + "^b": {"type": "integer"}, + }, + } + ref, tds = self._convert(schema, "Multi") + assert ref.annotation == "dict[str, str | int]" + assert len(tds) == 0 + + def test_multiple_patterns_same_type(self): + """Multiple patternProperties with the same type collapse to one.""" + schema = { + "type": "object", + "patternProperties": { + "^a": {"type": "string"}, + "^b": {"type": "string"}, + }, + } + ref, tds = self._convert(schema, "Same") + assert ref.annotation == "dict[str, str]" + + def test_properties_take_precedence(self): + """Object with properties (not patternProperties) → TypedDict, not dict.""" + schema = { + "type": "object", + "properties": { + "name": {"type": "string"}, + }, + "required": ["name"], + } + ref, tds = self._convert(schema, "Named") + assert ref.annotation == "Named" + assert len(tds) == 1 + + def test_pattern_properties_in_parent_object(self): + """patternProperties nested inside a normal object with properties.""" + schema = { + "type": "object", + "properties": { + "kind": {"const": "packages", "type": "string"}, + "packages": { + "type": "object", + "patternProperties": { + "^(.*)$": { + "type": "object", + "properties": { + "count": {"type": "integer"}, + }, + "required": ["count"], + }, + }, + }, + }, + "required": ["kind", "packages"], + } + ref, tds = self._convert(schema, "Output") + assert ref.annotation == "Output" + 
+ output_td = next(td for td in tds if td.name == "Output") + packages_field = next(f for f in output_td.fields if f.name == "packages") + assert packages_field.type_ref.annotation == "dict[str, OutputPackagesValue]" + + def test_e2e_pattern_properties_codegen(self, tmp_path): + """End-to-end: schema with patternProperties → generated code → importable.""" + from river.codegen.emitter import write_generated_files + from river.codegen.schema import SchemaConverter + + schema = { + "services": { + "registry": { + "procedures": { + "listPackages": { + "type": "rpc", + "input": { + "type": "object", + "properties": { + "language": {"type": "string"}, + }, + "required": ["language"], + }, + "output": { + "type": "object", + "properties": { + "packages": { + "type": "object", + "patternProperties": { + "^(.*)$": { + "type": "object", + "properties": { + "all": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + }, + "required": [ + "name", + "version", + ], + }, + }, + }, + }, + }, + }, + }, + "required": ["packages"], + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string", + }, + "message": {"type": "string"}, + }, + "required": ["code", "message"], + } + ], + }, + }, + }, + }, + }, + } + + converter = SchemaConverter() + ir = converter.convert(schema) + + output_dir = str(tmp_path / "generated") + os.makedirs(output_dir, exist_ok=True) + write_generated_files(ir, output_dir) + + # Verify generated types file is valid Python + types_path = os.path.join(output_dir, "_types.py") + assert os.path.exists(types_path) + with open(types_path) as f: + source = f.read() + + # Should contain dict[str, ...] 
not an empty TypedDict + assert "dict[str," in source + assert "pass" not in source or "pass" in source.split("class")[0] + + # Verify it compiles + compile(source, types_path, "exec") + + # Import and check the type at runtime + import importlib.util + + spec = importlib.util.spec_from_file_location("_types", types_path) + mod = importlib.util.module_from_spec(spec) + spec.loader.exec_module(mod) # type: ignore + + # The output type should reference the value TypedDict via dict[str, ...] + assert hasattr(mod, "RegistryListPackagesOutput") + assert hasattr(mod, "RegistryListPackagesOutputPackagesValue") + assert hasattr(mod, "RegistryListPackagesOutputPackagesValueAllItem") + + # Check the annotation references dict[str, ...] with the value type + ann = str(mod.RegistryListPackagesOutput.__annotations__["packages"]) + assert "dict[str," in ann + assert "RegistryListPackagesOutputPackagesValue" in ann + + # The value type has an 'all' field with a list of items + value_ann = str( + mod.RegistryListPackagesOutputPackagesValue.__annotations__["all"] + ) + assert "list[" in value_ann diff --git a/python-client/tests/test_e2e.py b/python-client/tests/test_e2e.py new file mode 100644 index 00000000..852b0541 --- /dev/null +++ b/python-client/tests/test_e2e.py @@ -0,0 +1,2054 @@ +"""End-to-end tests for the River Python client. + +Tests the Python client against the TypeScript test server, covering +all four procedure types and core protocol behavior. 
+""" + +from __future__ import annotations + +import asyncio + +import pytest + +from river.client import RiverClient +from river.codec import BinaryCodec +from river.transport import WebSocketClientTransport +from tests.test_utils import wait_for_connected + +# -- helpers -- + + +async def make_client(server_url: str, **kwargs) -> RiverClient: + """Create a connected RiverClient.""" + transport = WebSocketClientTransport( + ws_url=server_url, + client_id=None, # auto-generate + server_id="SERVER", + codec=BinaryCodec(), + ) + return RiverClient( + transport, + server_id="SERVER", + connect_on_invoke=kwargs.get("connect_on_invoke", True), + eagerly_connect=kwargs.get("eagerly_connect", False), + ) + + +async def cleanup_client(client: RiverClient) -> None: + await client.transport.close() + + +# ===================================================================== +# RPC Tests +# ===================================================================== + + +class TestRpc: + @pytest.mark.asyncio + async def test_rpc_basic(self, server_url: str): + """Basic RPC call returns correct result.""" + client = await make_client(server_url) + try: + result = await client.rpc("test", "add", {"n": 3}) + assert result["ok"] is True + assert result["payload"]["result"] == 3 + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_fallible_rpc_success(self, server_url: str): + """Fallible RPC returns Ok on valid input.""" + client = await make_client(server_url) + try: + result = await client.rpc("fallible", "divide", {"a": 10, "b": 2}) + assert result["ok"] is True + assert result["payload"]["result"] == 5.0 + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_fallible_rpc_error(self, server_url: str): + """Fallible RPC returns Err with correct error code.""" + client = await make_client(server_url) + try: + result = await client.rpc("fallible", "divide", {"a": 10, "b": 0}) + assert result["ok"] is False + assert 
result["payload"]["code"] == "DIV_BY_ZERO" + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_concurrent_rpcs(self, server_url: str): + """Multiple concurrent RPCs all complete correctly.""" + client = await make_client(server_url) + try: + tasks = [client.rpc("ordering", "add", {"n": i}) for i in range(10)] + results = await asyncio.gather(*tasks) + for i, result in enumerate(results): + assert result["ok"] is True + assert result["payload"]["n"] == i + finally: + await cleanup_client(client) + + +# ===================================================================== +# Stream Tests +# ===================================================================== + + +class TestStream: + @pytest.mark.asyncio + async def test_stream_basic(self, server_url: str): + """Stream echoes messages correctly, skipping ignored ones.""" + client = await make_client(server_url) + try: + stream = client.stream("test", "echo", {}) + + # Write messages + stream.req_writable.write({"msg": "hello", "ignore": False}) + stream.req_writable.write({"msg": "world", "ignore": False}) + stream.req_writable.write({"msg": "skip", "ignore": True}) + stream.req_writable.write({"msg": "end", "ignore": False}) + stream.req_writable.close() + + # Read responses + results = [] + async for msg in stream.res_readable: + results.append(msg) + + assert len(results) == 3 + assert results[0]["ok"] is True + assert results[0]["payload"]["response"] == "hello" + assert results[1]["payload"]["response"] == "world" + assert results[2]["payload"]["response"] == "end" + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_stream_empty(self, server_url: str): + """Stream with immediate close returns no results.""" + client = await make_client(server_url) + try: + stream = client.stream("test", "echo", {}) + stream.req_writable.close() + + results = await stream.res_readable.collect() + assert len(results) == 0 + finally: + await cleanup_client(client) + + 
+    @pytest.mark.asyncio
+    async def test_stream_with_init_message(self, server_url: str):
+        """Stream handler receives the init message."""
+        client = await make_client(server_url)
+        try:
+            # Third argument is the stream's init payload; responses below
+            # come back prefixed with init["prefix"].
+            stream = client.stream("test", "echoWithPrefix", {"prefix": "test"})
+            stream.req_writable.write({"msg": "hello", "ignore": False})
+            stream.req_writable.write({"msg": "world", "ignore": False})
+            stream.req_writable.close()
+
+            results = await stream.res_readable.collect()
+            assert len(results) == 2
+            assert results[0]["payload"]["response"] == "test hello"
+            assert results[1]["payload"]["response"] == "test world"
+        finally:
+            await cleanup_client(client)
+
+    @pytest.mark.asyncio
+    async def test_fallible_stream(self, server_url: str):
+        """Stream correctly propagates both Ok and Err results."""
+        client = await make_client(server_url)
+        try:
+            stream = client.stream("fallible", "echo", {})
+
+            # Normal message -> ok=True echo
+            stream.req_writable.write(
+                {"msg": "hello", "throwResult": False, "throwError": False}
+            )
+            done, msg = await stream.res_readable.next()
+            assert not done
+            assert msg["ok"] is True
+            assert msg["payload"]["response"] == "hello"
+
+            # Error result (service-level error) -> ok=False, stream continues
+            stream.req_writable.write(
+                {"msg": "fail", "throwResult": True, "throwError": False}
+            )
+            done, msg = await stream.res_readable.next()
+            assert not done
+            assert msg["ok"] is False
+            assert msg["payload"]["code"] == "STREAM_ERROR"
+
+            # Uncaught error (causes stream cancel)
+            stream.req_writable.write(
+                {"msg": "throw", "throwResult": False, "throwError": True}
+            )
+            done, msg = await stream.res_readable.next()
+            assert not done
+            assert msg["ok"] is False
+            assert msg["payload"]["code"] == "UNCAUGHT_ERROR"
+        finally:
+            await cleanup_client(client)
+
+    @pytest.mark.asyncio
+    async def test_concurrent_streams(self, server_url: str):
+        """Multiple concurrent streams work independently."""
+        client = await make_client(server_url)
+        try:
+            # Open all five streams before writing so they are in flight
+            # concurrently rather than sequentially.
+            streams = []
+            for _ in range(5):
+                s = client.stream("test",
"echo", {}) + streams.append(s) + + # Write to each stream + for i, s in enumerate(streams): + s.req_writable.write({"msg": f"msg-{i}", "ignore": False}) + s.req_writable.close() + + # Read from each stream + for i, s in enumerate(streams): + results = await s.res_readable.collect() + assert len(results) == 1 + assert results[0]["payload"]["response"] == f"msg-{i}" + finally: + await cleanup_client(client) + + +# ===================================================================== +# Subscription Tests +# ===================================================================== + + +class TestSubscription: + @pytest.mark.asyncio + async def test_subscription_basic(self, server_url: str): + """Subscription receives initial value and updates.""" + client = await make_client(server_url) + try: + sub = client.subscribe("subscribable", "value", {}) + + # Read initial value + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is True + initial_count = msg["payload"]["count"] + + # Trigger an update + add_result = await client.rpc("subscribable", "add", {"n": 1}) + assert add_result["ok"] is True + + # Read updated value + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is True + assert msg["payload"]["count"] == initial_count + 1 + finally: + await cleanup_client(client) + + +# ===================================================================== +# Upload Tests +# ===================================================================== + + +class TestUpload: + @pytest.mark.asyncio + async def test_upload_basic(self, server_url: str): + """Upload sums multiple values correctly.""" + client = await make_client(server_url) + try: + upload = client.upload("uploadable", "addMultiple", {}) + upload.req_writable.write({"n": 1}) + upload.req_writable.write({"n": 2}) + upload.req_writable.close() + + result = await upload.finalize() + assert result["ok"] is True + assert result["payload"]["result"] == 3 + finally: + await 
cleanup_client(client)
+
+    @pytest.mark.asyncio
+    async def test_upload_empty(self, server_url: str):
+        """Upload with no data returns zero."""
+        client = await make_client(server_url)
+        try:
+            upload = client.upload("uploadable", "addMultiple", {})
+            # Close with no writes; finalize should still resolve.
+            upload.req_writable.close()
+
+            result = await upload.finalize()
+            assert result["ok"] is True
+            assert result["payload"]["result"] == 0
+        finally:
+            await cleanup_client(client)
+
+    @pytest.mark.asyncio
+    async def test_upload_with_init_message(self, server_url: str):
+        """Upload handler receives the init message."""
+        client = await make_client(server_url)
+        try:
+            upload = client.upload(
+                "uploadable", "addMultipleWithPrefix", {"prefix": "test"}
+            )
+            upload.req_writable.write({"n": 1})
+            upload.req_writable.write({"n": 2})
+            upload.req_writable.close()
+
+            result = await upload.finalize()
+            assert result["ok"] is True
+            assert result["payload"]["result"] == "test 3"
+        finally:
+            await cleanup_client(client)
+
+    @pytest.mark.asyncio
+    async def test_upload_server_cancel(self, server_url: str):
+        """Upload receives server-initiated cancel when limit exceeded."""
+        client = await make_client(server_url)
+        try:
+            upload = client.upload("uploadable", "cancellableAdd", {})
+            upload.req_writable.write({"n": 9})
+            upload.req_writable.write({"n": 1})
+            # Don't close - server should cancel
+
+            result = await upload.finalize()
+            assert result["ok"] is False
+            assert result["payload"]["code"] == "CANCEL"
+        finally:
+            await cleanup_client(client)
+
+
+# =====================================================================
+# Disconnect Tests
+# =====================================================================
+
+
+class TestDisconnect:
+    @pytest.mark.asyncio
+    async def test_rpc_on_closed_transport(self, server_url: str):
+        """RPC on a closed transport returns UNEXPECTED_DISCONNECT."""
+        client = await make_client(server_url)
+        # Close the transport up front; the call should fail fast rather
+        # than hang, so no try/finally cleanup is needed here.
+        await client.transport.close()
+
+        result = await client.rpc("test", "add", {"n": 1})
+        assert
result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + @pytest.mark.asyncio + async def test_stream_on_closed_transport(self, server_url: str): + """Stream on a closed transport returns UNEXPECTED_DISCONNECT.""" + client = await make_client(server_url) + await client.transport.close() + + stream = client.stream("test", "echo", {}) + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + @pytest.mark.asyncio + async def test_upload_on_closed_transport(self, server_url: str): + """Upload on a closed transport returns UNEXPECTED_DISCONNECT.""" + client = await make_client(server_url) + await client.transport.close() + + upload = client.upload("uploadable", "addMultiple", {}) + assert not upload.req_writable.is_writable() + result = await upload.finalize() + assert result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + @pytest.mark.asyncio + async def test_subscription_on_closed_transport(self, server_url: str): + """Subscription on a closed transport returns UNEXPECTED_DISCONNECT.""" + client = await make_client(server_url) + await client.transport.close() + + sub = client.subscribe("subscribable", "value", {}) + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + +# ===================================================================== +# Client-Initiated Cancellation Tests +# ===================================================================== + + +class TestClientCancellation: + """Tests for client-initiated cancellation via abort signal. + + Uses the cancel.blocking* handlers on the test server which never resolve, + allowing us to test that the client abort properly sends CANCEL and + receives the CANCEL result. 
+ """ + + @pytest.mark.asyncio + async def test_cancel_rpc(self, server_url: str): + """Client abort on RPC returns CANCEL error.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + + async def do_abort(): + await wait_for_connected(client.transport) + abort_evt.set() + + asyncio.ensure_future(do_abort()) + result = await client.rpc( + "cancel", "blockingRpc", {}, abort_signal=abort_evt + ) + assert result["ok"] is False + assert result["payload"]["code"] == "CANCEL" + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_cancel_stream(self, server_url: str): + """Client abort on stream returns CANCEL error.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + stream = client.stream( + "cancel", "blockingStream", {}, abort_signal=abort_evt + ) + await wait_for_connected(client.transport) + abort_evt.set() + + results = await stream.res_readable.collect() + assert len(results) == 1 + assert results[0]["ok"] is False + assert results[0]["payload"]["code"] == "CANCEL" + assert not stream.req_writable.is_writable() + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_cancel_upload(self, server_url: str): + """Client abort on upload returns CANCEL error.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + upload = client.upload( + "cancel", "blockingUpload", {}, abort_signal=abort_evt + ) + await wait_for_connected(client.transport) + abort_evt.set() + + result = await upload.finalize() + assert result["ok"] is False + assert result["payload"]["code"] == "CANCEL" + assert not upload.req_writable.is_writable() + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_cancel_subscription(self, server_url: str): + """Client abort on subscription returns CANCEL error.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + sub = client.subscribe( + "cancel", 
"blockingSubscription", {}, abort_signal=abort_evt + ) + await wait_for_connected(client.transport) + abort_evt.set() + + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "CANCEL" + finally: + await cleanup_client(client) + + +# ===================================================================== +# Idempotent Close / Post-Close Safety Tests +# ===================================================================== + + +class TestIdempotentClose: + """Tests that closing/aborting after completion is a safe no-op.""" + + @pytest.mark.asyncio + async def test_stream_idempotent_close(self, server_url: str): + """Closing and aborting a stream after it finished is safe.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + stream = client.stream("test", "echo", {}, abort_signal=abort_evt) + stream.req_writable.write({"msg": "abc", "ignore": False}) + stream.req_writable.close() + + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is True + assert msg["payload"]["response"] == "abc" + + # Abort after stream completed - should be a no-op + abort_evt.set() + + # Drain any remaining messages - should be done or at most a cancel + done, val = await stream.res_readable.next() + # Either the stream is done, or we got a cancel (both ok) + if not done: + assert val["ok"] is False + + # "Accidentally" close again - no crash + stream.req_writable.close() + abort_evt.set() + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_subscription_idempotent_close(self, server_url: str): + """Aborting a subscription after it was already aborted is safe.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + sub = client.subscribe("subscribable", "value", {}, abort_signal=abort_evt) + # Read initial value + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is True + + # 
Abort + abort_evt.set() + + # Read the cancel + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "CANCEL" + + # "Accidentally" abort again + abort_evt.set() + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_cancellation_after_transport_close(self, server_url: str): + """Closing/aborting after transport close doesn't crash.""" + client = await make_client(server_url) + try: + abort_evt = asyncio.Event() + stream = client.stream("test", "echo", {}, abort_signal=abort_evt) + stream.req_writable.write({"msg": "1", "ignore": False}) + done, msg = await stream.res_readable.next() + assert not done + assert msg["payload"]["response"] == "1" + + # Close the transport + await client.transport.close() + + # Closing writable after transport close should be safe + stream.req_writable.close() + # Aborting after transport close should be safe + abort_evt.set() + # No crash = success + finally: + # Transport already closed + pass + + +# ===================================================================== +# Eagerly Connect Test +# ===================================================================== + + +class TestEagerConnect: + @pytest.mark.asyncio + async def test_eagerly_connect(self, server_url: str): + """eagerlyConnect creates a connection before any procedure call.""" + transport = WebSocketClientTransport( + ws_url=server_url, + server_id="SERVER", + codec=BinaryCodec(), + ) + client = RiverClient(transport, server_id="SERVER", eagerly_connect=True) + try: + await wait_for_connected(transport) + assert len(transport.sessions) > 0 + # Verify the connection works by making a call + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + finally: + await transport.close() + + +# ===================================================================== +# Transparent Reconnect Tests +# 
===================================================================== + + +class TestTransparentReconnect: + @pytest.mark.asyncio + async def test_reconnect_with_concurrent_streams(self, server_url: str): + """Multiple concurrent streams survive a connection drop and reconnect.""" + transport = WebSocketClientTransport( + ws_url=server_url, + server_id="SERVER", + codec=BinaryCodec(), + ) + client = RiverClient(transport, server_id="SERVER") + try: + # Open three concurrent streams with different prefixes + stream_a = client.stream("test", "echoWithPrefix", {"prefix": "A"}) + stream_b = client.stream("test", "echoWithPrefix", {"prefix": "B"}) + stream_c = client.stream("test", "echoWithPrefix", {"prefix": "C"}) + + # Send initial messages on each stream and verify they work + stream_a.req_writable.write({"msg": "1", "ignore": False}) + stream_b.req_writable.write({"msg": "1", "ignore": False}) + stream_c.req_writable.write({"msg": "1", "ignore": False}) + + done_a, msg_a = await stream_a.res_readable.next() + done_b, msg_b = await stream_b.res_readable.next() + done_c, msg_c = await stream_c.res_readable.next() + + assert not done_a and msg_a["payload"]["response"] == "A 1" + assert not done_b and msg_b["payload"]["response"] == "B 1" + assert not done_c and msg_c["payload"]["response"] == "C 1" + + # Force-close the WebSocket to simulate a network drop + session = transport.sessions.get("SERVER") + assert session is not None + assert session._ws is not None + await session._ws.close() + + # Wait for reconnection + await wait_for_connected(transport) + + # Send more messages on all three streams after reconnect + stream_a.req_writable.write({"msg": "2", "ignore": False}) + stream_b.req_writable.write({"msg": "2", "ignore": False}) + stream_c.req_writable.write({"msg": "2", "ignore": False}) + + # Close all streams + stream_a.req_writable.close() + stream_b.req_writable.close() + stream_c.req_writable.close() + + # Verify the post-reconnect messages arrived 
correctly + done_a2, msg_a2 = await stream_a.res_readable.next() + done_b2, msg_b2 = await stream_b.res_readable.next() + done_c2, msg_c2 = await stream_c.res_readable.next() + + assert not done_a2 and msg_a2["payload"]["response"] == "A 2" + assert not done_b2 and msg_b2["payload"]["response"] == "B 2" + assert not done_c2 and msg_c2["payload"]["response"] == "C 2" + + # Streams should close cleanly + done_a3, _ = await stream_a.res_readable.next() + done_b3, _ = await stream_b.res_readable.next() + done_c3, _ = await stream_c.res_readable.next() + assert done_a3 + assert done_b3 + assert done_c3 + finally: + await transport.close() + + +# ===================================================================== +# Codec Tests +# ===================================================================== + + +class TestCodec: + @pytest.mark.asyncio + async def test_binary_codec_roundtrip(self): + """Binary (msgpack) codec encodes and decodes transport messages.""" + from river.codec import BinaryCodec, CodecMessageAdapter + from river.types import TransportMessage + + adapter = CodecMessageAdapter(BinaryCodec()) + msg = TransportMessage( + id="test123", + from_="client", + to="server", + seq=1, + ack=0, + payload={"data": "hello"}, + stream_id="s1", + control_flags=0, + ) + ok, buf = adapter.to_buffer(msg) + assert ok is True + ok, decoded = adapter.from_buffer(buf) + assert ok is True + assert decoded.payload == {"data": "hello"} + + +# ===================================================================== +# Stream Unit Tests +# ===================================================================== + + +class TestReadable: + @pytest.mark.asyncio + async def test_readable_close(self): + """Closing a readable makes it done.""" + from river.streams import Readable + + r: Readable = Readable() + r._trigger_close() + assert r.is_closed() + + @pytest.mark.asyncio + async def test_readable_iterate(self): + """Can iterate over pushed values.""" + from river.streams import Readable 
+
+        r: Readable = Readable()
+        r._push_value({"ok": True, "payload": 1})
+        r._push_value({"ok": True, "payload": 2})
+        r._trigger_close()
+
+        results = await r.collect()
+        assert len(results) == 2
+        assert results[0]["payload"] == 1
+        assert results[1]["payload"] == 2
+
+    @pytest.mark.asyncio
+    async def test_readable_push_after_close_raises(self):
+        """Pushing to a closed readable raises."""
+        from river.streams import Readable
+
+        r: Readable = Readable()
+        r._trigger_close()
+        with pytest.raises(RuntimeError):
+            r._push_value({"ok": True, "payload": 1})
+
+    @pytest.mark.asyncio
+    async def test_readable_double_close_raises(self):
+        """Closing a readable twice raises."""
+        from river.streams import Readable
+
+        r: Readable = Readable()
+        r._trigger_close()
+        with pytest.raises(RuntimeError):
+            r._trigger_close()
+
+    @pytest.mark.asyncio
+    async def test_readable_break(self):
+        """Breaking a readable yields broken error on next read."""
+        from river.streams import Readable
+
+        r: Readable = Readable()
+        r._push_value({"ok": True, "payload": 1})
+        # Read the queued value before break_() — break_() locks the
+        # stream, so consume it via next() first.
+        done, val = await r.next()
+        assert not done
+        assert val["payload"] == 1
+        r.break_()
+        done, val = await r.next()
+        assert not done
+        assert val["ok"] is False
+        assert val["payload"]["code"] == "READABLE_BROKEN"
+        r._trigger_close()
+
+    @pytest.mark.asyncio
+    async def test_readable_async_for(self):
+        """Works with async for loop."""
+        from river.streams import Readable
+
+        r: Readable = Readable()
+        r._push_value({"ok": True, "payload": "a"})
+        r._push_value({"ok": True, "payload": "b"})
+        r._trigger_close()
+
+        values = []
+        async for item in r:
+            values.append(item)
+        assert len(values) == 2
+
+
+class TestWritable:
+    def test_writable_write(self):
+        """Write callback is invoked."""
+        from river.streams import Writable
+
+        received = []
+        w: Writable = Writable(write_cb=received.append)
+        w.write(1)
+        w.write(2)
+        assert received == [1, 2]
+
+    def
test_writable_close(self):
+        """Close callback is invoked once."""
+        from river.streams import Writable
+
+        # One-element list lets the close_cb lambda mutate the counter
+        # without needing nonlocal.
+        close_count = [0]
+        w: Writable = Writable(
+            write_cb=lambda x: None,
+            close_cb=lambda: close_count.__setitem__(0, close_count[0] + 1),
+        )
+        assert w.is_writable()
+        w.close()
+        assert not w.is_writable()
+        assert close_count[0] == 1
+
+    def test_writable_idempotent_close(self):
+        """Closing multiple times only invokes callback once."""
+        from river.streams import Writable
+
+        close_count = [0]
+        w: Writable = Writable(
+            write_cb=lambda x: None,
+            close_cb=lambda: close_count.__setitem__(0, close_count[0] + 1),
+        )
+        w.close()
+        w.close()
+        w.close()
+        assert close_count[0] == 1
+
+    def test_writable_write_after_close_raises(self):
+        """Writing after close raises."""
+        from river.streams import Writable
+
+        w: Writable = Writable(write_cb=lambda x: None)
+        w.close()
+        with pytest.raises(RuntimeError):
+            w.write(42)
+
+    def test_writable_close_with_value(self):
+        """Close with a final value writes it before closing."""
+        from river.streams import Writable
+
+        received = []
+        w: Writable = Writable(write_cb=received.append)
+        # close(42) must invoke write_cb with 42 before transitioning
+        # to the closed state.
+        w.close(42)
+        assert received == [42]
+        assert w.is_closed()
+
+
+# =====================================================================
+# Types Unit Tests
+# =====================================================================
+
+
+class TestTypes:
+    def test_generate_id_length(self):
+        """Generated IDs are 12 characters."""
+        from river.types import generate_id
+
+        for _ in range(100):
+            assert len(generate_id()) == 12
+
+    def test_generate_id_unique(self):
+        """Generated IDs are unique."""
+        from river.types import generate_id
+
+        ids = {generate_id() for _ in range(1000)}
+        assert len(ids) == 1000
+
+    def test_control_flags(self):
+        """Control flag bit operations work correctly."""
+        from river.types import (
+            ControlFlags,
+            is_ack,
+            is_stream_cancel,
+            is_stream_close,
+            is_stream_open,
+        )
+
+        assert
is_ack(ControlFlags.AckBit) + assert not is_ack(0) + assert is_stream_open(ControlFlags.StreamOpenBit) + assert is_stream_close(ControlFlags.StreamClosedBit) + assert is_stream_cancel(ControlFlags.StreamCancelBit) + + # Combined flags + combined = ControlFlags.StreamOpenBit | ControlFlags.StreamClosedBit + assert is_stream_open(combined) + assert is_stream_close(combined) + assert not is_ack(combined) + + def test_transport_message_roundtrip(self): + """TransportMessage serializes and deserializes correctly.""" + from river.types import TransportMessage + + msg = TransportMessage( + id="test123", + from_="client1", + to="server1", + seq=5, + ack=3, + payload={"data": "hello"}, + stream_id="stream1", + control_flags=0, + service_name="myService", + procedure_name="myProc", + ) + d = msg.to_dict() + assert d["from"] == "client1" + assert d["to"] == "server1" + assert d["serviceName"] == "myService" + + msg2 = TransportMessage.from_dict(d) + assert msg2.from_ == "client1" + assert msg2.seq == 5 + assert msg2.service_name == "myService" + + +# ===================================================================== +# Codec Unit Tests +# ===================================================================== + + +class TestReadableLocking: + """Tests for Readable stream locking semantics (mirrors TS streams.test.ts).""" + + @pytest.mark.asyncio + async def test_lock_on_aiter(self): + """__aiter__ locks the stream; second call raises TypeError.""" + from river.streams import Readable + + r: Readable = Readable() + r.__aiter__() + assert not r.is_readable() + with pytest.raises(TypeError): + r.__aiter__() + r._trigger_close() + + @pytest.mark.asyncio + async def test_lock_on_collect(self): + """collect() locks the stream; __aiter__ raises TypeError.""" + from river.streams import Readable + + r: Readable = Readable() + # Don't await - just start collect (it will block waiting for close) + collect_task = asyncio.ensure_future(r.collect()) + await asyncio.sleep(0) # yield to 
let collect start + assert not r.is_readable() + with pytest.raises(TypeError): + r.__aiter__() + r._trigger_close() + await collect_task + + @pytest.mark.asyncio + async def test_lock_on_break(self): + """break_() locks the stream; __aiter__ raises TypeError.""" + from river.streams import Readable + + r: Readable = Readable() + r.break_() + assert not r.is_readable() + with pytest.raises(TypeError): + r.__aiter__() + r._trigger_close() + + @pytest.mark.asyncio + async def test_raw_iter_from_aiter(self): + """Can use the raw iterator from __aiter__.""" + from river.streams import Readable + + r: Readable = Readable() + it = r.__aiter__() + next_p = it.__anext__() + r._push_value({"ok": True, "payload": 1}) + val = await next_p + assert val == {"ok": True, "payload": 1} + next_p2 = it.__anext__() + r._trigger_close() + with pytest.raises(StopAsyncIteration): + await next_p2 + + +class TestReadableIteration: + """Tests for Readable iteration edge cases (mirrors TS streams.test.ts).""" + + @pytest.mark.asyncio + async def test_values_pushed_before_close(self): + """Can iterate values that were pushed before close.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._push_value({"ok": True, "payload": 2}) + r._push_value({"ok": True, "payload": 3}) + r._trigger_close() + done, val = await r.next() + assert not done and val["payload"] == 1 + done, val = await r.next() + assert not done and val["payload"] == 2 + done, val = await r.next() + assert not done and val["payload"] == 3 + done, val = await r.next() + assert done + + @pytest.mark.asyncio + async def test_eager_iteration(self): + """Read before push resolves in order.""" + from river.streams import Readable + + r: Readable = Readable() + # Start reading before values are pushed + t1 = asyncio.ensure_future(r.next()) + t2 = asyncio.ensure_future(r.next()) + # Give tasks a chance to start waiting + await asyncio.sleep(0) + r._push_value({"ok": True, 
"payload": 1}) + r._push_value({"ok": True, "payload": 2}) + done1, val1 = await t1 + done2, val2 = await t2 + assert not done1 and val1["payload"] == 1 + assert not done2 and val2["payload"] == 2 + # Third read + close + t3 = asyncio.ensure_future(r.next()) + await asyncio.sleep(0) + r._push_value({"ok": True, "payload": 3}) + r._trigger_close() + done3, val3 = await t3 + assert not done3 and val3["payload"] == 3 + done4, _ = await r.next() + assert done4 + + @pytest.mark.asyncio + async def test_not_resolve_until_push(self): + """Pending next() doesn't resolve until push or close.""" + from river.streams import Readable + + r: Readable = Readable() + next_p = asyncio.ensure_future(r.next()) + # Should not resolve yet + try: + await asyncio.wait_for(asyncio.shield(next_p), timeout=0.01) + except asyncio.TimeoutError: + pass + done = next_p.done() + assert not done, "next() should not resolve before push" + + r._push_value({"ok": True, "payload": 1}) + await asyncio.sleep(0) + done_v, val = await next_p + assert not done_v and val["payload"] == 1 + + # isDone should not resolve until close + done_p = asyncio.ensure_future(r.next()) + await asyncio.sleep(0.01) + assert not done_p.done(), "next() should not resolve before close" + r._trigger_close() + done_v2, _ = await done_p + assert done_v2 + + @pytest.mark.asyncio + async def test_collect_after_close(self): + """collect() returns all values when called after close.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._push_value({"ok": True, "payload": 2}) + r._push_value({"ok": True, "payload": 3}) + r._trigger_close() + results = await r.collect() + assert len(results) == 3 + assert [v["payload"] for v in results] == [1, 2, 3] + + @pytest.mark.asyncio + async def test_collect_waits_for_close(self): + """collect() doesn't resolve until the stream is closed.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": 
True, "payload": 1}) + collect_task = asyncio.ensure_future(r.collect()) + r._push_value({"ok": True, "payload": 2}) + r._push_value({"ok": True, "payload": 3}) + await asyncio.sleep(0.01) + assert not collect_task.done(), "collect should not resolve before close" + r._push_value({"ok": True, "payload": 4}) + r._trigger_close() + results = await collect_task + assert len(results) == 4 + assert [v["payload"] for v in results] == [1, 2, 3, 4] + + @pytest.mark.asyncio + async def test_async_for_with_break(self): + """Breaking out of async for mid-stream stops iteration.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._push_value({"ok": True, "payload": 2}) + assert r._has_values_in_queue() + values = [] + async for item in r: + values.append(item) + assert r._has_values_in_queue() + break + # After break, remaining values should be discarded (broken) + assert not r._has_values_in_queue() + + @pytest.mark.asyncio + async def test_error_results_in_iteration(self): + """Error results are yielded as part of iteration.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._push_value({"ok": True, "payload": 2}) + r._push_value( + {"ok": False, "payload": {"code": "SOME_ERROR", "message": "err"}} + ) + r._trigger_close() + results = [] + async for item in r: + results.append(item) + assert len(results) == 3 + assert results[0]["ok"] is True + assert results[1]["ok"] is True + assert results[2]["ok"] is False + assert results[2]["payload"]["code"] == "SOME_ERROR" + + +class TestReadableBreakVariants: + """Tests for Readable break() edge cases (mirrors TS streams.test.ts).""" + + @pytest.mark.asyncio + async def test_break_signals_next(self): + """break() signals the next read call.""" + from river.streams import Readable + + r: Readable = Readable() + r.break_() + done, val = await r.next() + assert not done + assert val["ok"] is False + assert 
val["payload"]["code"] == "READABLE_BROKEN" + r._trigger_close() + + @pytest.mark.asyncio + async def test_break_signals_pending(self): + """break() signals a pending read.""" + from river.streams import Readable + + r: Readable = Readable() + pending = asyncio.ensure_future(r.next()) + await asyncio.sleep(0) + r.break_() + done, val = await pending + assert not done + assert val["ok"] is False + assert val["payload"]["code"] == "READABLE_BROKEN" + r._trigger_close() + + @pytest.mark.asyncio + async def test_break_with_queued_value(self): + """break() clears queue and yields broken error.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + assert r._has_values_in_queue() + r.break_() + assert not r._has_values_in_queue() + done, val = await r.next() + assert not done + assert val["payload"]["code"] == "READABLE_BROKEN" + r._trigger_close() + + @pytest.mark.asyncio + async def test_break_with_queued_value_after_close(self): + """break() after close with queued values still yields broken error.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._trigger_close() + r.break_() + done, val = await r.next() + assert not done + assert val["payload"]["code"] == "READABLE_BROKEN" + + @pytest.mark.asyncio + async def test_break_empty_queue_after_close(self): + """break() after close with empty queue -> done.""" + from river.streams import Readable + + r: Readable = Readable() + r._trigger_close() + r.break_() + done, _ = await r.next() + assert done + + @pytest.mark.asyncio + async def test_break_ends_iteration_midstream(self): + """break() during async for ends iteration.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._push_value({"ok": True, "payload": 2}) + r._push_value({"ok": True, "payload": 3}) + + results = [] + i = 0 + async for item in r: + if i == 0: + assert 
item["payload"] == 1 + r.break_() + elif i == 1: + assert item["ok"] is False + assert item["payload"]["code"] == "READABLE_BROKEN" + results.append(item) + i += 1 + assert i == 2 + + +class TestCodecUnit: + def test_binary_codec_encode_decode(self): + """Binary (msgpack) codec round-trips correctly.""" + from river.codec import BinaryCodec + + codec = BinaryCodec() + obj = {"key": "value", "num": 42, "nested": {"a": [1, 2, 3]}} + buf = codec.to_buffer(obj) + assert isinstance(buf, bytes) + result = codec.from_buffer(buf) + assert result == obj + + def test_codec_adapter_valid(self): + """CodecMessageAdapter encodes and decodes transport messages.""" + from river.codec import BinaryCodec, CodecMessageAdapter + from river.types import TransportMessage + + adapter = CodecMessageAdapter(BinaryCodec()) + msg = TransportMessage( + id="abc", + from_="c1", + to="s1", + seq=0, + ack=0, + payload={"type": "ACK"}, + stream_id="heartbeat", + control_flags=1, + ) + ok, buf = adapter.to_buffer(msg) + assert ok is True + + ok, result = adapter.from_buffer(buf) + assert ok is True + assert result.id == "abc" + assert result.from_ == "c1" + + def test_codec_adapter_invalid_buffer(self): + """CodecMessageAdapter returns error on invalid bytes.""" + from river.codec import BinaryCodec, CodecMessageAdapter + + adapter = CodecMessageAdapter(BinaryCodec()) + ok, result = adapter.from_buffer(b"not valid json") + assert ok is False + assert isinstance(result, str) + + def test_codec_adapter_rejects_wrong_seq_type(self): + """CodecMessageAdapter rejects seq that is not an int.""" + from river.codec import BinaryCodec, CodecMessageAdapter + + adapter = CodecMessageAdapter(BinaryCodec()) + raw = BinaryCodec().to_buffer( + { + "id": "m1", + "from": "s", + "to": "c", + "seq": "0", # wrong type + "ack": 0, + "payload": {}, + "streamId": "st1", + } + ) + ok, result = adapter.from_buffer(raw) + assert ok is False + assert "seq" in result + assert "str" in result + + def 
test_codec_adapter_rejects_wrong_ack_type(self): + """CodecMessageAdapter rejects ack that is not an int.""" + from river.codec import BinaryCodec, CodecMessageAdapter + + adapter = CodecMessageAdapter(BinaryCodec()) + raw = BinaryCodec().to_buffer( + { + "id": "m1", + "from": "s", + "to": "c", + "seq": 0, + "ack": "0", # wrong type + "payload": {}, + "streamId": "st1", + } + ) + ok, result = adapter.from_buffer(raw) + assert ok is False + assert "ack" in result + + def test_codec_adapter_rejects_missing_control_flags(self): + """CodecMessageAdapter rejects messages without controlFlags.""" + from river.codec import BinaryCodec, CodecMessageAdapter + + adapter = CodecMessageAdapter(BinaryCodec()) + raw = BinaryCodec().to_buffer( + { + "id": "m1", + "from": "s", + "to": "c", + "seq": 0, + "ack": 0, + "payload": {}, + "streamId": "st1", + # controlFlags omitted + } + ) + ok, result = adapter.from_buffer(raw) + assert ok is False + assert "controlFlags" in result + + def test_binary_codec_bigint_js_safe_range(self): + """Ints beyond JS MAX_SAFE_INTEGER use bigint extension.""" + from river.codec import BinaryCodec + + codec = BinaryCodec() + just_over = 2**53 + 1 + buf = codec.to_buffer({"n": just_over}) + decoded = codec.from_buffer(buf) + assert decoded["n"] == just_over + + # Value at the boundary should still be a normal int + at_boundary = 2**53 - 1 + buf2 = codec.to_buffer({"n": at_boundary}) + decoded2 = codec.from_buffer(buf2) + assert decoded2["n"] == at_boundary + + def test_binary_codec_negative_bigint_js_safe_range(self): + """Negative ints beyond -MAX_SAFE_INTEGER use bigint extension.""" + from river.codec import BinaryCodec + + codec = BinaryCodec() + just_under = -(2**53 + 1) + buf = codec.to_buffer({"n": just_under}) + decoded = codec.from_buffer(buf) + assert decoded["n"] == just_under + + def test_binary_codec_bigint_uses_ext_type(self): + """Large ints are encoded as msgpack ExtType, not native ints.""" + import msgpack + + from river.codec import 
BinaryCodec + + codec = BinaryCodec() + big = 2**53 + 1 + buf = codec.to_buffer({"n": big}) + # Unpack raw (without ext_hook) to verify the value is an ExtType + raw = msgpack.unpackb(buf, raw=False) + assert isinstance(raw["n"], msgpack.ExtType) + assert raw["n"].code == 0 + + def test_binary_codec_bigint_nested(self): + """Large ints nested in lists and dicts are encoded as ExtType.""" + from river.codec import BinaryCodec + + codec = BinaryCodec() + big = 2**53 + 1 + obj = {"a": [big], "b": {"c": big}} + decoded = codec.from_buffer(codec.to_buffer(obj)) + assert decoded["a"][0] == big + assert decoded["b"]["c"] == big + + def test_binary_codec_bool_not_treated_as_bigint(self): + """Booleans (subclass of int) should not be converted to ExtType.""" + from river.codec import BinaryCodec + + codec = BinaryCodec() + decoded = codec.from_buffer(codec.to_buffer({"flag": True})) + assert decoded["flag"] is True + + +# ===================================================================== +# Lifecycle / Cleanup Tests +# ===================================================================== + + +class TestListenerCleanup: + """Verify that event listeners are cleaned up after cancel/disconnect.""" + + @pytest.mark.asyncio + async def test_cancel_cleans_up_listeners(self, server_url: str): + """Cancelling a stream removes transport event listeners.""" + transport = WebSocketClientTransport( + ws_url=server_url, + server_id="SERVER", + codec=BinaryCodec(), + ) + client = RiverClient(transport, server_id="SERVER") + try: + before_msg = transport._events.listener_count("message") + before_ss = transport._events.listener_count("sessionStatus") + + abort = asyncio.Event() + stream = client.stream("cancel", "blockingStream", {}, abort_signal=abort) + # Each stream registers +1 message and +1 sessionStatus listener + assert transport._events.listener_count("message") == before_msg + 1 + + abort.set() + await stream.res_readable.next() # consume CANCEL error + + assert 
transport._events.listener_count("message") == before_msg + assert transport._events.listener_count("sessionStatus") == before_ss + finally: + await transport.close() + + @pytest.mark.asyncio + async def test_repeated_cancels_do_not_leak(self, server_url: str): + """Many cancelled streams don't accumulate stale listeners.""" + transport = WebSocketClientTransport( + ws_url=server_url, + server_id="SERVER", + codec=BinaryCodec(), + ) + client = RiverClient(transport, server_id="SERVER") + try: + before = transport._events.listener_count("message") + + for _ in range(20): + abort = asyncio.Event() + stream = client.stream( + "cancel", "blockingStream", {}, abort_signal=abort + ) + abort.set() + await stream.res_readable.next() + + assert transport._events.listener_count("message") == before + finally: + await transport.close() + + @pytest.mark.asyncio + async def test_abort_task_cancelled_on_normal_close(self, server_url: str): + """Abort watcher task is cancelled when stream completes normally.""" + transport = WebSocketClientTransport( + ws_url=server_url, + server_id="SERVER", + codec=BinaryCodec(), + ) + client = RiverClient(transport, server_id="SERVER") + try: + # Create a stream with an abort signal that is never set + abort = asyncio.Event() + stream = client.stream("test", "echo", {}, abort_signal=abort) + + stream.req_writable.write({"msg": "hi", "ignore": False}) + done, msg = await stream.res_readable.next() + assert not done and msg["ok"] is True + + stream.req_writable.close() + # Wait for server to close the stream + done2, _ = await stream.res_readable.next() + assert done2 + + # Setting the signal now should be harmless (no stale cancel) + abort.set() + finally: + await transport.close() + + +class TestUploadFinalize: + """Verify upload finalize closes the request stream.""" + + @pytest.mark.asyncio + async def test_finalize_without_explicit_close(self, server_url: str): + """finalize() closes req_writable if caller didn't.""" + client = await 
make_client(server_url) + try: + upload = client.upload("uploadable", "addMultiple", {}) + upload.req_writable.write({"n": 5}) + upload.req_writable.write({"n": 3}) + # Don't call upload.req_writable.close() — finalize should do it + result = await asyncio.wait_for(upload.finalize(), timeout=5.0) + assert result["ok"] is True + assert result["payload"]["result"] == 8 + finally: + await cleanup_client(client) + + @pytest.mark.asyncio + async def test_finalize_after_explicit_close(self, server_url: str): + """finalize() works when req_writable was already closed.""" + client = await make_client(server_url) + try: + upload = client.upload("uploadable", "addMultiple", {}) + upload.req_writable.write({"n": 2}) + upload.req_writable.close() + result = await asyncio.wait_for(upload.finalize(), timeout=5.0) + assert result["ok"] is True + assert result["payload"]["result"] == 2 + finally: + await cleanup_client(client) + + +# ===================================================================== +# Protocol conformance tests +# ===================================================================== + + +class TestProtocolConformance: + """Tests verifying protocol-level conformance.""" + + def test_handshake_stream_id_is_random(self): + """Handshake streamId should be a random ID, not a fixed string. + + The protocol requires a random streamId for handshakes. 
+ """ + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session + + codec = CodecMessageAdapter(BinaryCodec()) + s1 = Session("sess1", "client", "server", codec) + s2 = Session("sess2", "client", "server", codec) + + hs1 = s1.create_handshake_request() + hs2 = s2.create_handshake_request() + + # streamId should NOT be the fixed string "handshake" + assert hs1.stream_id != "handshake" + # streamId should be random (different between sessions) + assert hs1.stream_id != hs2.stream_id + # Should have a reasonable length (like generate_id output) + assert len(hs1.stream_id) > 8 + + def test_readable_push_after_break_is_noop(self): + """push_value after break_() should not buffer (memory leak fix).""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"val": 1}) + r.break_() + # After break, queue should be cleared + assert not r._has_values_in_queue() + # Pushing more values should be silently discarded + r._push_value({"val": 2}) + r._push_value({"val": 3}) + assert not r._has_values_in_queue() + + def test_writable_close_nullifies_callbacks(self): + """After close(), write/close callbacks should not be invocable. + + Callbacks should be nullified after close to prevent reuse. 
+ """ + from river.streams import Writable + + write_count = [0] + close_count = [0] + w: Writable = Writable( + write_cb=lambda x: write_count.__setitem__(0, write_count[0] + 1), + close_cb=lambda: close_count.__setitem__(0, close_count[0] + 1), + ) + w.close() + assert close_count[0] == 1 + + # After close, the callbacks should not fire again even if we + # bypass the _closed check (internal invariant) + w._closed = False + w.close() + # Should still be 1 since callbacks were nullified + assert close_count[0] == 1 + + def test_heartbeat_stream_id_is_fixed(self): + """Heartbeat streamId should be the fixed string 'heartbeat'.""" + from river.types import heartbeat_message + + hb = heartbeat_message() + assert hb.stream_id == "heartbeat" + + def test_handshake_payload_matches_ts_schema(self): + """Handshake request payload has all required fields.""" + from river.types import PROTOCOL_VERSION, handshake_request_payload + + payload = handshake_request_payload( + session_id="test-session", + next_expected_seq=0, + next_sent_seq=0, + metadata={"token": "abc"}, + ) + assert payload["type"] == "HANDSHAKE_REQ" + assert payload["protocolVersion"] == PROTOCOL_VERSION + assert payload["sessionId"] == "test-session" + assert payload["expectedSessionState"]["nextExpectedSeq"] == 0 + assert payload["expectedSessionState"]["nextSentSeq"] == 0 + assert payload["metadata"] == {"token": "abc"} + + def test_handshake_payload_omits_metadata_when_none(self): + """Handshake without metadata should not include metadata field.""" + from river.types import handshake_request_payload + + payload = handshake_request_payload( + session_id="test-session", + next_expected_seq=0, + next_sent_seq=0, + metadata=None, + ) + assert "metadata" not in payload + + +class TestFatalErrorPaths: + """Regression tests for fatal error paths that must destroy the session. + + Certain errors are not retryable and must immediately destroy + the session. 
+ """ + + def test_failed_send_destroys_session(self): + """Send failure on a connected session destroys it.""" + from unittest.mock import AsyncMock + + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session, SessionState + from river.transport import WebSocketClientTransport + + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", + client_id="client", + server_id="server", + codec=BinaryCodec(), + ) + codec = CodecMessageAdapter(BinaryCodec()) + session = Session("s1", "client", "server", codec) + session.state = SessionState.CONNECTED + session._ws = AsyncMock() + transport.sessions["server"] = session + + send_fn = transport.get_session_bound_send_fn("server", "s1") + + # A payload that can't be serialized (set is not JSON-serializable) + from river.types import PartialTransportMessage + + try: + send_fn( + PartialTransportMessage( + payload={"bad": {1, 2}}, + stream_id="x", + control_flags=0, + ) + ) + except RuntimeError: + pass + + # Session must be destroyed + assert transport.sessions.get("server") is None + + def test_failed_send_seq_consumed(self): + """Send failure does not roll back seq. + + The seq is consumed and the session is destroyed instead. 
+ """ + from unittest.mock import AsyncMock + + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session, SessionState + from river.types import PartialTransportMessage + + codec = CodecMessageAdapter(BinaryCodec()) + session = Session("s1", "client", "server", codec) + session.state = SessionState.CONNECTED + session._ws = AsyncMock() + + initial_seq = session.seq + + ok, _ = session.send( + PartialTransportMessage( + payload={"bad": {1, 2}}, + stream_id="x", + control_flags=0, + ) + ) + + assert not ok + # seq was consumed (not rolled back) + assert session.seq == initial_seq + 1 + + def test_invalid_message_destroys_session(self): + """Receiving a corrupt message destroys the session.""" + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session, SessionState + from river.transport import WebSocketClientTransport + + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", + client_id="client", + server_id="server", + codec=BinaryCodec(), + ) + codec = CodecMessageAdapter(BinaryCodec()) + session = Session("s1", "client", "server", codec) + session.state = SessionState.CONNECTED + transport.sessions["server"] = session + + errors: list[dict] = [] + transport.add_event_listener("protocolError", lambda e: errors.append(e)) + + # Feed garbage bytes + transport._on_message_data(session, b"not valid json", "server") + + # Session must be destroyed + assert transport.sessions.get("server") is None + assert len(errors) == 1 + assert errors[0]["type"] == "invalid_message" + + @pytest.mark.asyncio + async def test_malformed_handshake_status_closes_ws(self): + """Non-dict handshake status closes the socket cleanly.""" + from unittest.mock import AsyncMock + + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session, SessionState + from river.transport import WebSocketClientTransport + from river.types import TransportMessage + + transport = 
WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", + client_id="client", + server_id="server", + codec=BinaryCodec(), + ) + codec_adapter = CodecMessageAdapter(BinaryCodec()) + session = Session("s1", "client", "server", codec_adapter) + session.state = SessionState.HANDSHAKING + transport.sessions["server"] = session + + # Build a handshake response with non-dict status + resp_msg = TransportMessage( + id="hs", + from_="server", + to="client", + seq=0, + ack=0, + payload={ + "type": "HANDSHAKE_RESP", + "status": "oops", # should be dict + }, + stream_id="heartbeat", + control_flags=1, + ) + ok, resp_bytes = codec_adapter.to_buffer(resp_msg) + assert ok + + ws = AsyncMock() + ws.recv = AsyncMock(return_value=resp_bytes) + ws.close = AsyncMock() + + await transport._do_handshake(session, ws, "server") + + # WebSocket should have been closed + ws.close.assert_awaited_once() + # Session should be deleted + assert "server" not in transport.sessions + + def test_readable_broken_after_async_for_break(self): + """Breaking out of async for marks readable as broken.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + + # Simulate what async for + break does: create iterator, get + # one value, then let the iterator be GC'd + it = r.__aiter__() + # The __del__ should mark broken + del it + + assert r._broken + # Subsequent pushes should be no-ops + r._push_value({"ok": True, "payload": 2}) + assert not r._has_values_in_queue() + + def test_frozen_session_options(self): + """SessionOptions is frozen — mutation raises.""" + from river.session import SessionOptions + + opts = SessionOptions() + try: + opts.heartbeat_interval_ms = 999 # type: ignore[misc] + raise AssertionError("should have raised FrozenInstanceError") + except AttributeError: + pass # frozen dataclass raises AttributeError on mutation + + def test_binary_codec_large_int(self): + """Binary codec handles ints beyond msgpack native range.""" + from 
river.codec import BinaryCodec + + codec = BinaryCodec() + large = 10**30 + buf = codec.to_buffer({"n": large}) + decoded = codec.from_buffer(buf) + assert decoded["n"] == large + + @pytest.mark.asyncio + async def test_next_rejects_after_aiter_lock(self): + """next() raises TypeError if stream is locked by __aiter__.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + _it = r.__aiter__() # locks by consumer + + with pytest.raises(TypeError, match="already locked"): + await r.next() + + @pytest.mark.asyncio + async def test_next_rejects_after_collect_lock(self): + """next() raises TypeError if stream is locked by collect().""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + r._trigger_close() + + await r.collect() # locks by consumer + with pytest.raises(TypeError, match="already locked"): + await r.next() + + @pytest.mark.asyncio + async def test_iterator_del_marks_broken_and_wakes(self): + """Dropping an iterator marks the stream as broken and wakes waiters.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"ok": True, "payload": 1}) + + # Iterate and break out + async for _item in r: + break + + # Stream should be broken (iterator __del__ ran) + assert r._broken + # Push after break should be a no-op + r._push_value({"ok": True, "payload": 2}) + assert not r._has_values_in_queue() + + @pytest.mark.asyncio + async def test_connection_failed_starts_grace_period(self, server_url: str): + """Connection failure starts grace period so session eventually dies.""" + from river.session import SessionOptions + from tests.test_utils import wait_for_session_gone + + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", # unreachable + client_id=None, + server_id="UNREACHABLE", + codec=BinaryCodec(), + options=SessionOptions( + connection_timeout_ms=100, + session_disconnect_grace_ms=300, + ), + ) + 
transport.reconnect_on_connection_drop = False + try: + transport.connect("UNREACHABLE") + # Connection will fail; grace period starts + await wait_for_session_gone(transport, "UNREACHABLE") + finally: + await transport.close() + + def test_enable_transparent_reconnects_option(self): + """enable_transparent_reconnects=False disables reconnect.""" + from river.session import SessionOptions + + opts = SessionOptions(enable_transparent_reconnects=False) + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", + client_id=None, + server_id="SERVER", + codec=BinaryCodec(), + options=opts, + ) + assert transport.reconnect_on_connection_drop is False + + def test_literal_const_escaping(self): + """String consts with quotes/backslashes/control chars are escaped.""" + from river.codegen.schema import SchemaConverter + + converter = SchemaConverter() + schema = {"const": 'a"b'} + ref = converter._schema_to_typeref(schema, "Test") + assert ref.annotation == 'Literal["a\\"b"]' + + schema2 = {"const": "a\\b"} + ref2 = converter._schema_to_typeref(schema2, "Test") + assert ref2.annotation == 'Literal["a\\\\b"]' + + # Control characters must be escaped + schema3 = {"const": "line1\nline2"} + ref3 = converter._schema_to_typeref(schema3, "Test") + assert ref3.annotation == 'Literal["line1\\nline2"]' + + schema4 = {"const": "a\tb"} + ref4 = converter._schema_to_typeref(schema4, "Test") + assert ref4.annotation == 'Literal["a\\tb"]' + + def test_is_closed_with_buffered_data(self): + """is_closed() is False when closed but queue has data.""" + from river.streams import Readable + + r: Readable = Readable() + r._push_value({"val": 1}) + r._trigger_close() + # Closed but not fully consumed + assert r.is_closed() is False + assert r._closed is True + + @pytest.mark.asyncio + async def test_close_cancels_inflight_connect(self, server_url: str): + """close() during handshake doesn't leak the websocket.""" + transport = WebSocketClientTransport( + ws_url=server_url, + 
client_id=None, + server_id="SERVER", + codec=BinaryCodec(), + ) + transport.connect("SERVER") + # Let connection start but don't wait for completion + await asyncio.sleep(0) + await transport.close() + # No leaked sessions + assert len(transport.sessions) == 0 + + +# ===================================================================== +# OTel Tracing Propagation Tests +# ===================================================================== + + +class TestOtelTracingPropagation: + def test_handshake_includes_tracing_when_otel_available(self): + """Handshake message includes tracing when OTel propagation is configured.""" + + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session + + session = Session( + session_id="test-session", + from_id="client", + to_id="server", + codec=CodecMessageAdapter(BinaryCodec()), + ) + + tracing = { + "traceparent": "00-abc123-def456-01", + "tracestate": "vendor=value", + } + msg = session.create_handshake_request(tracing=tracing) + assert msg.tracing == tracing + wire = msg.to_dict() + assert wire["tracing"] == tracing + + def test_handshake_omits_tracing_when_none(self): + """Handshake message omits tracing when not provided.""" + from river.codec import BinaryCodec, CodecMessageAdapter + from river.session import Session + + session = Session( + session_id="test-session", + from_id="client", + to_id="server", + codec=CodecMessageAdapter(BinaryCodec()), + ) + + msg = session.create_handshake_request() + assert msg.tracing is None + wire = msg.to_dict() + assert "tracing" not in wire + + def test_get_otel_propagation_context_with_mock(self): + """_get_otel_propagation_context extracts traceparent/tracestate.""" + from unittest.mock import patch + + transport = WebSocketClientTransport( + ws_url="ws://localhost:0", + client_id="test", + server_id="SERVER", + codec=BinaryCodec(), + ) + + def fake_inject(carrier): + carrier["traceparent"] = "00-tid-sid-01" + carrier["tracestate"] = "k=v" + + with 
patch( + "river.transport.propagate.inject", + side_effect=fake_inject, + ): + result = transport._get_otel_propagation_context() + + assert result == { + "traceparent": "00-tid-sid-01", + "tracestate": "k=v", + } + + +# ===================================================================== +# Eager connect in sync context +# ===================================================================== + + +class TestEagerConnectSync: + def test_eager_connect_raises_outside_loop(self): + """Constructing with eagerly_connect=True outside an event loop + raises RuntimeError rather than silently binding to a dead loop.""" + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", + server_id="SERVER", + codec=BinaryCodec(), + ) + with pytest.raises(RuntimeError, match="no running event loop"): + RiverClient(transport, server_id="SERVER", eagerly_connect=True) + + +# ===================================================================== +# Cancel frame validation +# ===================================================================== + + +class TestCancelFrameValidation: + @pytest.mark.asyncio + async def test_server_cancel_always_error_shaped(self, server_url: str): + """Server-initiated cancel always yields an error result.""" + client = await make_client(server_url) + try: + # cancellableAdd cancels when total >= 10 + upload = client.upload("uploadable", "cancellableAdd", {}) + upload.req_writable.write({"n": 15}) + # Server sends cancel with Err payload + result = await asyncio.wait_for(upload.finalize(), timeout=5.0) + assert result["ok"] is False + assert result["payload"]["code"] == "CANCEL" + finally: + await cleanup_client(client) + + def test_cancel_frame_ok_true_forced_to_error(self): + """A cancel payload with ok:true is coerced to error shape.""" + from river.types import ControlFlags, err_result + + # Simulate what on_message does with a cancel frame + payload = {"ok": True, "payload": {"unexpected": "success"}} + flags = ControlFlags.StreamCancelBit + + 
# The fix: cancel frames with ok:True get forced to err_result + from river.types import is_stream_cancel + + assert is_stream_cancel(flags) + # After the fix, the code checks `not payload["ok"]` — so + # ok:True falls through to the error branch + if isinstance(payload, dict) and "ok" in payload and not payload["ok"]: + result = payload + else: + code = ( + payload.get("code", "UNKNOWN") + if isinstance(payload, dict) + else "UNKNOWN" + ) + message = ( + payload.get("message", str(payload)) + if isinstance(payload, dict) + else str(payload) + ) + result = err_result(code, message) + + assert result["ok"] is False diff --git a/python-client/tests/test_equivalence.py b/python-client/tests/test_equivalence.py new file mode 100644 index 00000000..d1dc27bf --- /dev/null +++ b/python-client/tests/test_equivalence.py @@ -0,0 +1,681 @@ +"""Equivalence tests for the River Python client. + +Tests all procedure types against the TS test server using BinaryCodec. +""" + +from __future__ import annotations + +import asyncio + +import pytest + +from river.client import RiverClient +from river.codec import Codec +from river.session import SessionOptions +from river.transport import WebSocketClientTransport +from tests.test_utils import wait_for_connected, wait_for_session_gone + +# -- helpers -- + + +async def make_client( + url: str, codec: Codec, options: SessionOptions | None = None +) -> RiverClient: + transport = WebSocketClientTransport( + ws_url=url, + client_id=None, + server_id="SERVER", + codec=codec, + options=options, + ) + return RiverClient( + transport, + server_id="SERVER", + connect_on_invoke=True, + eagerly_connect=False, + ) + + +async def cleanup(client: RiverClient) -> None: + await client.transport.close() + + +# ===================================================================== +# RPC Equivalence +# ===================================================================== + + +class TestRpcEquivalence: + @pytest.mark.asyncio + async def test_basic_rpc(self, 
codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + result = await client.rpc("test", "add", {"n": 3}) + assert result["ok"] is True + # test.add uses a global accumulator, so just verify it returns a number + assert isinstance(result["payload"]["result"], (int, float)) + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_fallible_rpc_success(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + result = await client.rpc("fallible", "divide", {"a": 10, "b": 2}) + assert result["ok"] is True + assert result["payload"]["result"] == 5.0 + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_fallible_rpc_div_by_zero(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + result = await client.rpc("fallible", "divide", {"a": 10, "b": 0}) + assert result["ok"] is False + assert result["payload"]["code"] == "DIV_BY_ZERO" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_concurrent_rpcs(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + tasks = [client.rpc("ordering", "add", {"n": i}) for i in range(10)] + results = await asyncio.gather(*tasks) + for i, result in enumerate(results): + assert result["ok"] is True + assert result["payload"]["n"] == i + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_rpc_on_closed_transport(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + await client.transport.close() + + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + @pytest.mark.asyncio + async def test_binary_echo(self, codec_and_url: tuple[Codec, str]): + """Binary 
roundtrip — data passes through codec correctly.""" + codec, url = codec_and_url + client = await make_client(url, codec) + try: + test_data = b"\x00\x01\x02\xff\xfe\xfd" + result = await client.rpc("test", "echoBinary", {"data": test_data}) + assert result["ok"] is True + assert result["payload"]["length"] == len(test_data) + returned = result["payload"]["data"] + if isinstance(returned, (bytes, bytearray)): + assert bytes(returned) == test_data + finally: + await cleanup(client) + + +# ===================================================================== +# Stream Equivalence +# ===================================================================== + + +class TestStreamEquivalence: + @pytest.mark.asyncio + async def test_basic_echo_stream(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + stream = client.stream("test", "echo", {}) + stream.req_writable.write({"msg": "hello", "ignore": False}) + stream.req_writable.write({"msg": "world", "ignore": False}) + stream.req_writable.write({"msg": "skip", "ignore": True}) + stream.req_writable.write({"msg": "end", "ignore": False}) + stream.req_writable.close() + + results = await stream.res_readable.collect() + assert len(results) == 3 + assert results[0]["payload"]["response"] == "hello" + assert results[1]["payload"]["response"] == "world" + assert results[2]["payload"]["response"] == "end" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_stream_with_init_message(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + stream = client.stream("test", "echoWithPrefix", {"prefix": "pfx"}) + stream.req_writable.write({"msg": "hello", "ignore": False}) + stream.req_writable.write({"msg": "world", "ignore": False}) + stream.req_writable.close() + + results = await stream.res_readable.collect() + assert len(results) == 2 + assert results[0]["payload"]["response"] == 
"pfx hello" + assert results[1]["payload"]["response"] == "pfx world" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_empty_stream(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + stream = client.stream("test", "echo", {}) + stream.req_writable.close() + results = await stream.res_readable.collect() + assert len(results) == 0 + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_fallible_stream_ok(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + stream = client.stream("fallible", "echo", {}) + stream.req_writable.write( + {"msg": "hi", "throwResult": False, "throwError": False} + ) + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is True + assert msg["payload"]["response"] == "hi" + stream.req_writable.close() + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_fallible_stream_err(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + stream = client.stream("fallible", "echo", {}) + stream.req_writable.write( + {"msg": "fail", "throwResult": True, "throwError": False} + ) + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "STREAM_ERROR" + stream.req_writable.close() + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_fallible_stream_uncaught(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + stream = client.stream("fallible", "echo", {}) + stream.req_writable.write( + {"msg": "throw", "throwResult": False, "throwError": True} + ) + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNCAUGHT_ERROR" + finally: + 
await cleanup(client) + + @pytest.mark.asyncio + async def test_concurrent_streams(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + streams = [] + for _ in range(5): + s = client.stream("test", "echo", {}) + streams.append(s) + + for i, s in enumerate(streams): + s.req_writable.write({"msg": f"msg-{i}", "ignore": False}) + s.req_writable.close() + + for i, s in enumerate(streams): + results = await s.res_readable.collect() + assert len(results) == 1 + assert results[0]["payload"]["response"] == f"msg-{i}" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_stream_on_closed_transport(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + await client.transport.close() + + stream = client.stream("test", "echo", {}) + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + +# ===================================================================== +# Upload Equivalence +# ===================================================================== + + +class TestUploadEquivalence: + @pytest.mark.asyncio + async def test_basic_upload(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + upload = client.upload("uploadable", "addMultiple", {}) + upload.req_writable.write({"n": 1}) + upload.req_writable.write({"n": 2}) + upload.req_writable.close() + + result = await upload.finalize() + assert result["ok"] is True + assert result["payload"]["result"] == 3 + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_empty_upload(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + upload = client.upload("uploadable", "addMultiple", {}) + upload.req_writable.close() + + result = await 
upload.finalize() + assert result["ok"] is True + assert result["payload"]["result"] == 0 + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_upload_with_init_message(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + upload = client.upload( + "uploadable", "addMultipleWithPrefix", {"prefix": "total"} + ) + upload.req_writable.write({"n": 5}) + upload.req_writable.write({"n": 7}) + upload.req_writable.close() + + result = await upload.finalize() + assert result["ok"] is True + assert result["payload"]["result"] == "total 12" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_upload_server_cancel(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + upload = client.upload("uploadable", "cancellableAdd", {}) + upload.req_writable.write({"n": 9}) + upload.req_writable.write({"n": 1}) + + result = await upload.finalize() + assert result["ok"] is False + assert result["payload"]["code"] == "CANCEL" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_upload_finalize_auto_closes(self, codec_and_url: tuple[Codec, str]): + """finalize() auto-closes writable if not yet closed.""" + codec, url = codec_and_url + client = await make_client(url, codec) + try: + upload = client.upload("uploadable", "addMultiple", {}) + upload.req_writable.write({"n": 4}) + result = await upload.finalize() + assert result["ok"] is True + assert result["payload"]["result"] == 4 + assert not upload.req_writable.is_writable() + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_upload_on_closed_transport(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + await client.transport.close() + + upload = client.upload("uploadable", "addMultiple", {}) + assert not upload.req_writable.is_writable() + result = await 
upload.finalize() + assert result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + +# ===================================================================== +# Subscription Equivalence +# ===================================================================== + + +class TestSubscriptionEquivalence: + @pytest.mark.asyncio + async def test_subscription_initial_and_update( + self, codec_and_url: tuple[Codec, str] + ): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + sub = client.subscribe("subscribable", "value", {}) + + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is True + initial_count = msg["payload"]["count"] + + add_result = await client.rpc("subscribable", "add", {"n": 1}) + assert add_result["ok"] is True + + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is True + assert msg["payload"]["count"] == initial_count + 1 + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_subscription_abort(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + abort_evt = asyncio.Event() + sub = client.subscribe("subscribable", "value", {}, abort_signal=abort_evt) + + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is True + + abort_evt.set() + + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "CANCEL" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_subscription_on_closed_transport( + self, codec_and_url: tuple[Codec, str] + ): + codec, url = codec_and_url + client = await make_client(url, codec) + await client.transport.close() + + sub = client.subscribe("subscribable", "value", {}) + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + +# 
===================================================================== +# Cancellation Equivalence +# ===================================================================== + + +class TestCancellationEquivalence: + @pytest.mark.asyncio + async def test_cancel_rpc(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + abort_evt = asyncio.Event() + + async def trigger(): + # Wait for connection, then cancel + await wait_for_connected(client.transport) + abort_evt.set() + + asyncio.ensure_future(trigger()) + result = await client.rpc( + "cancel", "blockingRpc", {}, abort_signal=abort_evt + ) + assert result["ok"] is False + assert result["payload"]["code"] == "CANCEL" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_cancel_stream(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + abort_evt = asyncio.Event() + stream = client.stream( + "cancel", "blockingStream", {}, abort_signal=abort_evt + ) + await wait_for_connected(client.transport) + abort_evt.set() + + results = await stream.res_readable.collect() + assert len(results) == 1 + assert results[0]["ok"] is False + assert results[0]["payload"]["code"] == "CANCEL" + assert not stream.req_writable.is_writable() + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_cancel_upload(self, codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + abort_evt = asyncio.Event() + upload = client.upload( + "cancel", "blockingUpload", {}, abort_signal=abort_evt + ) + await wait_for_connected(client.transport) + abort_evt.set() + + result = await upload.finalize() + assert result["ok"] is False + assert result["payload"]["code"] == "CANCEL" + assert not upload.req_writable.is_writable() + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_cancel_subscription(self, 
codec_and_url: tuple[Codec, str]): + codec, url = codec_and_url + client = await make_client(url, codec) + try: + abort_evt = asyncio.Event() + sub = client.subscribe( + "cancel", + "blockingSubscription", + {}, + abort_signal=abort_evt, + ) + await wait_for_connected(client.transport) + abort_evt.set() + + done, msg = await sub.res_readable.next() + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "CANCEL" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_cancel_after_completion_is_noop( + self, codec_and_url: tuple[Codec, str] + ): + """Cancelling after the procedure completed doesn't crash.""" + codec, url = codec_and_url + client = await make_client(url, codec) + try: + abort_evt = asyncio.Event() + result = await client.rpc( + "cancel", "immediateRpc", {}, abort_signal=abort_evt + ) + assert result["ok"] is True + assert result["payload"]["done"] is True + + # Cancel after completion — should be safe no-op + abort_evt.set() + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_cancel_after_transport_close_is_safe( + self, codec_and_url: tuple[Codec, str] + ): + """Cancelling after transport close doesn't crash.""" + codec, url = codec_and_url + client = await make_client(url, codec) + abort_evt = asyncio.Event() + await client.rpc("cancel", "immediateRpc", {}, abort_signal=abort_evt) + await client.transport.close() + + abort_evt.set() + + +# ===================================================================== +# Disconnect Equivalence +# ===================================================================== + + +class TestDisconnectEquivalence: + @pytest.mark.asyncio + async def test_all_proc_types_on_closed_transport( + self, codec_and_url: tuple[Codec, str] + ): + """All 4 procedure types return UNEXPECTED_DISCONNECT on closed transport.""" + codec, url = codec_and_url + client = await make_client(url, codec) + await client.transport.close() + + result = await client.rpc("test", 
"add", {"n": 1}) + assert result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + stream = client.stream("test", "echo", {}) + done, msg = await stream.res_readable.next() + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + upload = client.upload("uploadable", "addMultiple", {}) + uresult = await upload.finalize() + assert uresult["ok"] is False + assert uresult["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + sub = client.subscribe("subscribable", "value", {}) + done, msg = await sub.res_readable.next() + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + + @pytest.mark.asyncio + async def test_mid_stream_disconnect(self, codec_and_url: tuple[Codec, str]): + """Force-closing the WS mid-stream produces disconnect error.""" + codec, url = codec_and_url + short_opts = SessionOptions(session_disconnect_grace_ms=200) + client = await make_client(url, codec, options=short_opts) + try: + # Disable reconnect so session gets destroyed + client.transport.reconnect_on_connection_drop = False + + stream = client.stream("test", "echo", {}) + stream.req_writable.write({"msg": "before", "ignore": False}) + + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is True + assert msg["payload"]["response"] == "before" + + session = client.transport.sessions.get("SERVER") + assert session is not None + if session._ws is not None: + await session._ws.close() + + await wait_for_session_gone(client.transport) + + # Session destroyed → stream gets UNEXPECTED_DISCONNECT + done, msg = await stream.res_readable.next() + if not done: + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + finally: + await cleanup(client) + + +# ===================================================================== +# Ordering Equivalence +# ===================================================================== + + +class 
TestOrderingEquivalence: + @pytest.mark.asyncio + async def test_concurrent_rpc_ordering(self, codec_and_url: tuple[Codec, str]): + """N concurrent RPCs to ordering service all arrive, responses match.""" + codec, url = codec_and_url + client = await make_client(url, codec) + try: + n = 20 + tasks = [client.rpc("ordering", "add", {"n": i}) for i in range(n)] + results = await asyncio.gather(*tasks) + + returned_ns = [] + for r in results: + assert r["ok"] is True + returned_ns.append(r["payload"]["n"]) + + assert sorted(returned_ns) == list(range(n)) + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_ordering_preserved_across_disconnects( + self, codec_and_url: tuple[Codec, str] + ): + """50 RPCs with forced disconnects at msg 10 and 42 — all arrive. + + Mirrors the TS e2e.test.ts 'message order is preserved in the face + of disconnects' test. + """ + codec, url = codec_and_url + client = await make_client(url, codec) + try: + session = client.transport.sessions.get("SERVER") + # Warm up connection + warm = await client.rpc("ordering", "add", {"n": -1}) + assert warm["ok"] is True + session = client.transport.sessions.get("SERVER") + assert session is not None + + tasks = [] + for i in range(50): + # Force-close WS at specific points + if i == 10 or i == 42: + ws = session._ws + if ws is not None: + await ws.close() + + tasks.append(client.rpc("ordering", "add", {"n": i})) + + results = await asyncio.gather(*tasks) + for r in results: + assert r["ok"] is True + + # Verify all 50 messages arrived at the server + get_result = await client.rpc("ordering", "getAll", {}) + assert get_result["ok"] is True + msgs = get_result["payload"]["msgs"] + # All values 0-49 should be present (plus the -1 warmup) + for i in range(50): + assert i in msgs, f"message {i} missing from server" + finally: + await cleanup(client) diff --git a/python-client/tests/test_handshake.py b/python-client/tests/test_handshake.py new file mode 100644 index 00000000..8c1b831a 
--- /dev/null +++ b/python-client/tests/test_handshake.py @@ -0,0 +1,121 @@ +"""Handshake metadata tests. + +Tests custom handshake metadata using a dedicated test server +that requires {token: string} in the handshake. +""" + +from __future__ import annotations + +import pytest + +from river.client import RiverClient +from river.codec import BinaryCodec +from river.transport import WebSocketClientTransport +from tests.test_utils import wait_for_connected, wait_for_event + + +async def make_handshake_client( + server_url: str, + handshake_metadata: dict | None = None, +) -> RiverClient: + transport = WebSocketClientTransport( + ws_url=server_url, + client_id=None, + server_id="HANDSHAKE_SERVER", + codec=BinaryCodec(), + handshake_metadata=handshake_metadata, + ) + return RiverClient( + transport, + server_id="HANDSHAKE_SERVER", + connect_on_invoke=True, + eagerly_connect=False, + ) + + +async def cleanup(client: RiverClient) -> None: + await client.transport.close() + + +class TestHandshake: + @pytest.mark.asyncio + async def test_handshake_with_valid_metadata(self, handshake_server_url: str): + """Client with valid handshake metadata can make RPCs.""" + client = await make_handshake_client( + handshake_server_url, + handshake_metadata={"token": "valid-token"}, + ) + try: + result = await client.rpc("test", "echo", {"msg": "hello"}) + assert result["ok"] is True + assert result["payload"]["response"] == "hello" + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_handshake_with_invalid_metadata_emits_error( + self, handshake_server_url: str + ): + """Client with invalid token triggers a protocolError event.""" + transport = WebSocketClientTransport( + ws_url=handshake_server_url, + client_id=None, + server_id="HANDSHAKE_SERVER", + codec=BinaryCodec(), + handshake_metadata={"token": "wrong-token"}, + ) + try: + transport.connect("HANDSHAKE_SERVER") + evt = await wait_for_event(transport, "protocolError") + assert evt["type"] in ( + 
"handshake_failed", + "conn_retry_exceeded", + ) + finally: + await transport.close() + + @pytest.mark.asyncio + async def test_handshake_with_missing_metadata_emits_error( + self, handshake_server_url: str + ): + """Client with no metadata triggers a protocolError event.""" + transport = WebSocketClientTransport( + ws_url=handshake_server_url, + client_id=None, + server_id="HANDSHAKE_SERVER", + codec=BinaryCodec(), + handshake_metadata=None, + ) + try: + transport.connect("HANDSHAKE_SERVER") + evt = await wait_for_event(transport, "protocolError") + assert evt is not None + finally: + await transport.close() + + @pytest.mark.asyncio + async def test_handshake_metadata_across_reconnect(self, handshake_server_url: str): + """Metadata is resent when reconnecting.""" + client = await make_handshake_client( + handshake_server_url, + handshake_metadata={"token": "valid-token"}, + ) + try: + result = await client.rpc("test", "echo", {"msg": "first"}) + assert result["ok"] is True + + session = client.transport.sessions.get("HANDSHAKE_SERVER") + assert session is not None + + ws = session._ws + if ws is not None: + await ws.close() + + # Wait for reconnect to complete + await wait_for_connected(client.transport, "HANDSHAKE_SERVER") + + result = await client.rpc("test", "echo", {"msg": "after-reconnect"}) + assert result["ok"] is True + assert result["payload"]["response"] == "after-reconnect" + finally: + await cleanup(client) diff --git a/python-client/tests/test_schema.json b/python-client/tests/test_schema.json new file mode 100644 index 00000000..96d0f025 --- /dev/null +++ b/python-client/tests/test_schema.json @@ -0,0 +1,2918 @@ +{ + "services": { + "test": { + "procedures": { + "add": { + "init": { + "type": "object", + "properties": { + "n": { + "type": "number" + } + }, + "required": [ + "n" + ] + }, + "output": { + "type": "object", + "properties": { + "result": { + "type": "number" + } + }, + "required": [ + "result" + ] + }, + "errors": { + "anyOf": [ + { + 
"type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + }, + "echo": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "response": { + "type": "string" + } + }, + "required": [ + "response" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + 
"type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "stream", + "input": { + "type": "object", + "properties": { + "msg": { + "type": "string" + }, + "ignore": { + "type": "boolean" + } + }, + "required": [ + "msg" + ] + } + }, + "echoWithPrefix": { + "init": { + "type": "object", + "properties": { + "prefix": { + "type": "string" + } + }, + "required": [ + "prefix" + ] + }, + "output": { + "type": "object", + "properties": { + "response": { + "type": "string" + } + }, + "required": [ + "response" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + 
}, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "stream", + "input": { + "type": "object", + "properties": { + "msg": { + "type": "string" + }, + "ignore": { + "type": "boolean" + } + }, + "required": [ + "msg" + ] + } + }, + "echoBinary": { + "init": { + "type": "object", + "properties": { + "data": { + "type": "Uint8Array" + } + }, + "required": [ + "data" + ] + }, + "output": { + "type": "object", + "properties": { + "data": { + "type": "Uint8Array" + }, + "length": { + "type": "number" + } + }, + "required": [ + "data", + "length" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + 
"required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + }, + "echoRecursive": { + "init": { + "$id": "T0", + "type": "object", + "properties": { + "value": { + "type": "string" + }, + "children": { + "type": "array", + "items": { + "$ref": "T0" + } + } + }, + "required": [ + "value" + ] + }, + "output": { + "$id": "T0", + "type": "object", + "properties": { + "value": { + "type": "string" + }, + "children": { + "type": "array", + "items": { + "$ref": "T0" + } + } + }, + "required": [ + "value" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + } + } + }, + "ordering": { + "procedures": { + "add": { + "init": { + "type": "object", + "properties": { + "n": { + "type": "number" + } + }, + "required": [ + "n" + ] + }, + "output": { + "type": "object", + "properties": { + "n": { + 
"type": "number" + } + }, + "required": [ + "n" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + }, + "getAll": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "msgs": { + "type": "array", + "items": { + "type": "number" + } + } + }, + "required": [ + "msgs" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { 
+ "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + } + } + }, + "fallible": { + "procedures": { + "divide": { + "init": { + "type": "object", + "properties": { + "a": { + "type": "number" + }, + "b": { + "type": "number" + } + }, + "required": [ + "a", + "b" + ] + }, + "output": { + "type": "object", + "properties": { + "result": { + "type": "number" + } + }, + "required": [ + "result" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "DIV_BY_ZERO", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INFINITY", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": 
"INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + }, + "echo": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "response": { + "type": "string" + } + }, + "required": [ + "response" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "STREAM_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, 
+ "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "stream", + "input": { + "type": "object", + "properties": { + "msg": { + "type": "string" + }, + "throwResult": { + "type": "boolean" + }, + "throwError": { + "type": "boolean" + } + }, + "required": [ + "msg" + ] + } + } + } + }, + "subscribable": { + "procedures": { + "add": { + "init": { + "type": "object", + "properties": { + "n": { + "type": "number" + } + }, + "required": [ + "n" + ] + }, + "output": { + "type": "object", + "properties": { + "result": { + "type": "number" + } + }, + "required": [ + "result" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", 
+ "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + }, + "value": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "count": { + "type": "number" + } + }, + "required": [ + "count" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "subscription" + } + } + }, + "uploadable": { + "procedures": { + "addMultiple": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "result": { + "type": "number" + } + }, + "required": [ + "result" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": 
"string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "upload", + "input": { + "type": "object", + "properties": { + "n": { + "type": "number" + } + }, + "required": [ + "n" + ] + } + }, + "addMultipleWithPrefix": { + "init": { + "type": "object", + "properties": { + "prefix": { + "type": "string" + } + }, + "required": [ + "prefix" + ] + }, + "output": { + "type": "object", + "properties": { + "result": { + "type": "string" + } + }, + "required": [ + "result" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": 
"INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "upload", + "input": { + "type": "object", + "properties": { + "n": { + "type": "number" + } + }, + "required": [ + "n" + ] + } + }, + "cancellableAdd": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "result": { + "type": "number" + } + }, + "required": [ + "result" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + 
"path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "upload", + "input": { + "type": "object", + "properties": { + "n": { + "type": "number" + } + }, + "required": [ + "n" + ] + } + } + } + }, + "cancel": { + "procedures": { + "blockingRpc": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": {} + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + 
"message" + ] + } + ] + }, + "type": "rpc" + }, + "blockingStream": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": {} + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "stream", + "input": { + "type": "object", + "properties": {} + } + }, + "blockingUpload": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": {} + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + 
}, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "upload", + "input": { + "type": "object", + "properties": {} + } + }, + "blockingSubscription": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": {} + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + 
"firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "subscription" + }, + "immediateRpc": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "done": { + "type": "boolean" + } + }, + "required": [ + "done" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "rpc" + }, + "immediateStream": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "done": { + "type": "boolean" + } + }, + "required": [ + "done" + ] + }, + "errors": { + 
"anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "stream", + "input": { + "type": "object", + "properties": {} + } + }, + "immediateUpload": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "done": { + "type": "boolean" + } + }, + "required": [ + "done" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": 
"INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "upload", + "input": { + "type": "object", + "properties": {} + } + }, + "immediateSubscription": { + "init": { + "type": "object", + "properties": {} + }, + "output": { + "type": "object", + "properties": { + "done": { + "type": "boolean" + } + }, + "required": [ + "done" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + 
"required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "subscription" + }, + "countedStream": { + "init": { + "type": "object", + "properties": { + "total": { + "type": "number" + } + }, + "required": [ + "total" + ] + }, + "output": { + "type": "object", + "properties": { + "i": { + "type": "number" + } + }, + "required": [ + "i" + ] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "path", + "message" + ] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": [ + "firstValidationErrors", + "totalErrors" + ] + } + }, + "required": [ + "code", + "message" + ] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": [ + "code", + "message" + ] + } + ] + }, + "type": "stream", + "input": { + "type": "object", + "properties": {} + } + } + } + } + } +} \ No newline at end of file diff --git a/python-client/tests/test_schema_handshake.json b/python-client/tests/test_schema_handshake.json new file 
mode 100644 index 00000000..d2f90bce --- /dev/null +++ b/python-client/tests/test_schema_handshake.json @@ -0,0 +1,118 @@ +{ + "services": { + "test": { + "procedures": { + "echo": { + "init": { + "type": "object", + "properties": { + "msg": { + "type": "string" + } + }, + "required": ["msg"] + }, + "output": { + "type": "object", + "properties": { + "response": { + "type": "string" + } + }, + "required": ["response"] + }, + "errors": { + "anyOf": [ + { + "type": "object", + "properties": { + "code": { + "const": "UNCAUGHT_ERROR", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": ["code", "message"] + }, + { + "type": "object", + "properties": { + "code": { + "const": "UNEXPECTED_DISCONNECT", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": ["code", "message"] + }, + { + "type": "object", + "properties": { + "code": { + "const": "INVALID_REQUEST", + "type": "string" + }, + "message": { + "type": "string" + }, + "extras": { + "type": "object", + "properties": { + "firstValidationErrors": { + "type": "array", + "items": { + "type": "object", + "properties": { + "path": { + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": ["path", "message"] + } + }, + "totalErrors": { + "type": "number" + } + }, + "required": ["firstValidationErrors", "totalErrors"] + } + }, + "required": ["code", "message"] + }, + { + "type": "object", + "properties": { + "code": { + "const": "CANCEL", + "type": "string" + }, + "message": { + "type": "string" + } + }, + "required": ["code", "message"] + } + ] + }, + "type": "rpc" + } + } + } + }, + "handshakeSchema": { + "type": "object", + "properties": { + "token": { + "type": "string" + } + }, + "required": ["token"] + } +} diff --git a/python-client/tests/test_server.ts b/python-client/tests/test_server.ts new file mode 100644 index 00000000..924ba3cb --- /dev/null +++ b/python-client/tests/test_server.ts @@ -0,0 +1,469 @@ +/** + * Standalone test server 
for the Python River client test suite. + * + * Starts a WebSocket server with the standard test services and prints + * the port to stdout so the Python test harness can connect. + * + * Usage (from river repo root): + * npx tsx --tsconfig python-client/tsconfig.tsx.json python-client/tests/test_server.ts + */ +import http from 'node:http'; +import { WebSocketServer } from 'ws'; +import { WebSocketServerTransport } from '../../transport/impls/ws/server'; +import { + createServer, + createServiceSchema, + Procedure, + Ok, + Err, +} from '../../router'; +import { Type } from '@sinclair/typebox'; +import { BinaryCodec } from '../../codec/binary'; +import { NaiveJsonCodec } from '../../codec/json'; + +const ServiceSchema = createServiceSchema(); + +const RecursivePayload = Type.Recursive((This) => + Type.Object({ + value: Type.String(), + children: Type.Optional(Type.Array(This)), + }), +); + +// ------------------------------------------------------------------- +// TestService – mirrors the TS TestServiceSchema +// ------------------------------------------------------------------- +let count = 0; + +const TestServiceSchema = ServiceSchema.define({ + add: Procedure.rpc({ + requestInit: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit }) { + count += reqInit.n; + + return Ok({ result: count }); + }, + }), + echo: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({ + msg: Type.String(), + ignore: Type.Optional(Type.Boolean()), + }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Never(), + async handler({ reqReadable, resWritable }) { + for await (const result of reqReadable) { + if (!result.ok) break; + const val = result.payload; + if (val.ignore) continue; + resWritable.write(Ok({ response: val.msg })); + } + resWritable.close(); + }, + }), + echoWithPrefix: Procedure.stream({ + requestInit: Type.Object({ 
prefix: Type.String() }), + requestData: Type.Object({ + msg: Type.String(), + ignore: Type.Optional(Type.Boolean()), + }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Never(), + async handler({ reqInit, reqReadable, resWritable }) { + for await (const result of reqReadable) { + if (!result.ok) break; + const val = result.payload; + if (val.ignore) continue; + resWritable.write(Ok({ response: `${reqInit.prefix} ${val.msg}` })); + } + resWritable.close(); + }, + }), + echoBinary: Procedure.rpc({ + requestInit: Type.Object({ data: Type.Uint8Array() }), + responseData: Type.Object({ + data: Type.Uint8Array(), + length: Type.Number(), + }), + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok({ data: reqInit.data, length: reqInit.data.length }); + }, + }), + echoRecursive: Procedure.rpc({ + requestInit: RecursivePayload, + responseData: RecursivePayload, + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok(reqInit); + }, + }), +}); + +// ------------------------------------------------------------------- +// OrderingService – for message ordering tests +// ------------------------------------------------------------------- +const msgs: Array = []; + +const OrderingServiceSchema = ServiceSchema.define({ + add: Procedure.rpc({ + requestInit: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ n: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit }) { + msgs.push(reqInit.n); + + return Ok({ n: reqInit.n }); + }, + }), + getAll: Procedure.rpc({ + requestInit: Type.Object({}), + responseData: Type.Object({ msgs: Type.Array(Type.Number()) }), + responseError: Type.Never(), + async handler(_ctx) { + const copy: Array = [...msgs]; + + return Ok({ msgs: copy }); + }, + }), +}); + +// ------------------------------------------------------------------- +// FallibleService – service-level errors +// ------------------------------------------------------------------- 
+const FallibleServiceSchema = ServiceSchema.define({ + divide: Procedure.rpc({ + requestInit: Type.Object({ a: Type.Number(), b: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Union([ + Type.Object({ + code: Type.Literal('DIV_BY_ZERO'), + message: Type.String(), + }), + Type.Object({ + code: Type.Literal('INFINITY'), + message: Type.String(), + }), + ]), + async handler({ reqInit }) { + if (reqInit.b === 0) { + return Err({ + code: 'DIV_BY_ZERO' as const, + message: 'Cannot divide by zero', + }); + } + const result = reqInit.a / reqInit.b; + if (!isFinite(result)) { + return Err({ + code: 'INFINITY' as const, + message: 'Result is infinity', + }); + } + + return Ok({ result }); + }, + }), + echo: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({ + msg: Type.String(), + throwResult: Type.Optional(Type.Boolean()), + throwError: Type.Optional(Type.Boolean()), + }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Object({ + code: Type.Literal('STREAM_ERROR'), + message: Type.String(), + }), + async handler({ reqReadable, resWritable }) { + for await (const result of reqReadable) { + if (!result.ok) break; + const val = result.payload; + if (val.throwError) { + throw new Error('uncaught error'); + } + if (val.throwResult) { + resWritable.write( + Err({ code: 'STREAM_ERROR' as const, message: 'stream error' }), + ); + continue; + } + resWritable.write(Ok({ response: val.msg })); + } + resWritable.close(); + }, + }), +}); + +// ------------------------------------------------------------------- +// SubscribableService – subscriptions +// ------------------------------------------------------------------- +let subCount = 0; + +type SubListener = (val: number) => void; +const subListeners = new Set(); + +const SubscribableServiceSchema = ServiceSchema.define({ + add: Procedure.rpc({ + requestInit: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ 
result: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit }) { + subCount += reqInit.n; + for (const l of subListeners) l(subCount); + + return Ok({ result: subCount }); + }, + }), + value: Procedure.subscription({ + requestInit: Type.Object({}), + responseData: Type.Object({ count: Type.Number() }), + responseError: Type.Never(), + async handler({ resWritable, ctx }) { + const listener: SubListener = (val) => { + resWritable.write(Ok({ count: val })); + }; + // Send initial value + resWritable.write(Ok({ count: subCount })); + subListeners.add(listener); + ctx.signal.addEventListener('abort', () => { + subListeners.delete(listener); + resWritable.close(); + }); + }, + }), +}); + +// ------------------------------------------------------------------- +// UploadableService – uploads +// ------------------------------------------------------------------- +const UploadableServiceSchema = ServiceSchema.define({ + addMultiple: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + responseError: Type.Never(), + async handler({ reqReadable }) { + let total = 0; + for await (const result of reqReadable) { + if (!result.ok) break; + total += result.payload.n; + } + + return Ok({ result: total }); + }, + }), + addMultipleWithPrefix: Procedure.upload({ + requestInit: Type.Object({ prefix: Type.String() }), + requestData: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.String() }), + responseError: Type.Never(), + async handler({ reqInit, reqReadable }) { + let total = 0; + for await (const result of reqReadable) { + if (!result.ok) break; + total += result.payload.n; + } + + return Ok({ result: `${reqInit.prefix} ${total}` }); + }, + }), + cancellableAdd: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({ n: Type.Number() }), + responseData: Type.Object({ result: Type.Number() }), + 
responseError: Type.Object({ + code: Type.Literal('CANCEL'), + message: Type.String(), + }), + async handler({ reqReadable, ctx }) { + let total = 0; + for await (const result of reqReadable) { + if (!result.ok) break; + total += result.payload.n; + if (total >= 10) { + ctx.cancel(); + + return Err({ + code: 'CANCEL' as const, + message: 'total exceeds limit', + }); + } + } + + return Ok({ result: total }); + }, + }), +}); + +// ------------------------------------------------------------------- +// CancellationService – handlers that block forever for cancel tests +// ------------------------------------------------------------------- +const CancellationServiceSchema = ServiceSchema.define({ + blockingRpc: Procedure.rpc({ + requestInit: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler({ ctx }) { + // Block until cancelled + return new Promise((_resolve) => { + ctx.signal.addEventListener('abort', () => { + // Handler will be cancelled by the framework, nothing to resolve + }); + }); + }, + }), + blockingStream: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler(_ctx) { + return new Promise(() => { + // never resolves + }); + }, + }), + blockingUpload: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler(_ctx) { + return new Promise(() => { + // never resolves + }); + }, + }), + blockingSubscription: Procedure.subscription({ + requestInit: Type.Object({}), + responseData: Type.Object({}), + responseError: Type.Never(), + async handler(_ctx) { + return new Promise(() => { + // never resolves + }); + }, + }), + // RPC that resolves normally (for clean handler cancellation) + immediateRpc: Procedure.rpc({ + requestInit: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + 
responseError: Type.Never(), + async handler() { + return Ok({ done: true }); + }, + }), + // Stream that writes one response and closes (for clean handler cancel) + immediateStream: Procedure.stream({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + responseError: Type.Never(), + async handler({ reqReadable, resWritable }) { + resWritable.write(Ok({ done: true })); + for await (const result of reqReadable) { + if (!result.ok) break; + } + resWritable.close(); + }, + }), + // Upload that resolves immediately + immediateUpload: Procedure.upload({ + requestInit: Type.Object({}), + requestData: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + responseError: Type.Never(), + async handler({ reqReadable }) { + for await (const result of reqReadable) { + if (!result.ok) break; + } + + return Ok({ done: true }); + }, + }), + // Subscription that closes immediately + immediateSubscription: Procedure.subscription({ + requestInit: Type.Object({}), + responseData: Type.Object({ done: Type.Boolean() }), + responseError: Type.Never(), + async handler({ resWritable }) { + resWritable.write(Ok({ done: true })); + resWritable.close(); + }, + }), + // Stream that sends N responses then closes (for idempotent close tests) + countedStream: Procedure.stream({ + requestInit: Type.Object({ total: Type.Number() }), + requestData: Type.Object({}), + responseData: Type.Object({ i: Type.Number() }), + responseError: Type.Never(), + async handler({ reqInit, reqReadable, resWritable }) { + for (let i = 0; i < reqInit.total; i++) { + resWritable.write(Ok({ i })); + } + // Wait for client to close the request stream + for await (const result of reqReadable) { + if (!result.ok) break; + } + resWritable.close(); + }, + }), +}); + +// ------------------------------------------------------------------- +// Boot the server +// ------------------------------------------------------------------- +const 
services = { + test: TestServiceSchema, + ordering: OrderingServiceSchema, + fallible: FallibleServiceSchema, + subscribable: SubscribableServiceSchema, + uploadable: UploadableServiceSchema, + cancel: CancellationServiceSchema, +}; + +async function main() { + const codec = + process.env.RIVER_CODEC === 'binary' ? BinaryCodec : NaiveJsonCodec; + + const httpServer = http.createServer(); + const port = await new Promise((resolve, reject) => { + httpServer.listen(0, '127.0.0.1', () => { + const addr = httpServer.address(); + if (typeof addr === 'object' && addr) resolve(addr.port); + else reject(new Error("couldn't get port")); + }); + }); + + const wss = new WebSocketServer({ server: httpServer }); + const serverTransport = new WebSocketServerTransport(wss, 'SERVER', { + codec, + }); + const _server = createServer(serverTransport, services); + + // Signal that the server is ready by printing the port + process.stdout.write(`RIVER_PORT=${port}\n`); + + // Keep the server alive + process.on('SIGTERM', () => { + void _server.close().then(() => { + httpServer.close(); + process.exit(0); + }); + }); + process.on('SIGINT', () => { + void _server.close().then(() => { + httpServer.close(); + process.exit(0); + }); + }); +} + +main().catch((err: unknown) => { + console.error('Failed to start test server:', err); + process.exit(1); +}); diff --git a/python-client/tests/test_server_handshake.ts b/python-client/tests/test_server_handshake.ts new file mode 100644 index 00000000..62d712aa --- /dev/null +++ b/python-client/tests/test_server_handshake.ts @@ -0,0 +1,82 @@ +/** + * Test server with handshake validation for Python client tests. + * + * Requires clients to send handshake metadata with {token: string}. + * Valid token is "valid-token". 
+ */ +import http from 'node:http'; +import { WebSocketServer } from 'ws'; +import { WebSocketServerTransport } from '../../transport/impls/ws/server'; +import { createServer, createServiceSchema, Procedure, Ok } from '../../router'; +import { createServerHandshakeOptions } from '../../router/handshake'; +import { Type } from '@sinclair/typebox'; +import { BinaryCodec } from '../../codec/binary'; + +const ServiceSchema = createServiceSchema(); + +const HandshakeTestServiceSchema = ServiceSchema.define({ + echo: Procedure.rpc({ + requestInit: Type.Object({ msg: Type.String() }), + responseData: Type.Object({ response: Type.String() }), + responseError: Type.Never(), + async handler({ reqInit }) { + return Ok({ response: reqInit.msg }); + }, + }), +}); + +const services = { + test: HandshakeTestServiceSchema, +}; + +const handshakeSchema = Type.Object({ token: Type.String() }); + +async function main() { + const httpServer = http.createServer(); + const port = await new Promise((resolve, reject) => { + httpServer.listen(0, '127.0.0.1', () => { + const addr = httpServer.address(); + if (typeof addr === 'object' && addr) resolve(addr.port); + else reject(new Error("couldn't get port")); + }); + }); + + const wss = new WebSocketServer({ server: httpServer }); + const serverTransport = new WebSocketServerTransport( + wss, + 'HANDSHAKE_SERVER', + { codec: BinaryCodec }, + ); + const _server = createServer(serverTransport, services, { + handshakeOptions: createServerHandshakeOptions( + handshakeSchema, + (metadata) => { + if (metadata.token !== 'valid-token') { + return 'REJECTED_BY_CUSTOM_HANDLER' as const; + } + + return {}; + }, + ), + }); + + process.stdout.write(`RIVER_PORT=${port}\n`); + + process.on('SIGTERM', () => { + void _server.close().then(() => { + httpServer.close(); + process.exit(0); + }); + }); + process.on('SIGINT', () => { + void _server.close().then(() => { + httpServer.close(); + process.exit(0); + }); + }); +} + +main().catch((err: unknown) => { + 
console.error('Failed to start handshake test server:', err); + process.exit(1); +}); diff --git a/python-client/tests/test_session.py b/python-client/tests/test_session.py new file mode 100644 index 00000000..5a7ed61e --- /dev/null +++ b/python-client/tests/test_session.py @@ -0,0 +1,562 @@ +"""Deterministic session lifecycle tests. + +Uses short timeouts to test heartbeat miss, grace period expiry, +and retry budget behavior without slow waits. +""" + +from __future__ import annotations + +import asyncio + +import pytest + +from river.client import RiverClient +from river.codec import BinaryCodec +from river.session import SessionOptions, SessionState +from river.transport import WebSocketClientTransport +from tests.test_utils import ( + wait_for, + wait_for_connected, + wait_for_disconnected, + wait_for_session_gone, +) + +SHORT_OPTIONS = SessionOptions( + heartbeat_interval_ms=100, + heartbeats_until_dead=2, # 200ms miss timeout + session_disconnect_grace_ms=300, # 300ms grace + connection_timeout_ms=2000, + handshake_timeout_ms=1000, +) + + +async def make_client( + server_url: str, + options: SessionOptions | None = None, +) -> RiverClient: + transport = WebSocketClientTransport( + ws_url=server_url, + client_id=None, + server_id="SERVER", + codec=BinaryCodec(), + options=options or SHORT_OPTIONS, + ) + return RiverClient( + transport, + server_id="SERVER", + connect_on_invoke=True, + eagerly_connect=False, + ) + + +async def cleanup(client: RiverClient) -> None: + await client.transport.close() + + +# ===================================================================== +# Heartbeat Miss Tests +# ===================================================================== + + +class TestHeartbeatMiss: + @pytest.mark.asyncio + async def test_ws_close_triggers_no_connection(self, server_url: str): + """Force-closing WS transitions session to NO_CONNECTION.""" + client = await make_client(server_url) + try: + result = await client.rpc("test", "add", {"n": 1}) + assert 
result["ok"] is True + + session = client.transport.sessions.get("SERVER") + assert session is not None + assert session.state == SessionState.CONNECTED + + client.transport.reconnect_on_connection_drop = False + await session._ws.close() + + await wait_for_disconnected(client.transport) + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_active_rpcs_keep_alive(self, server_url: str): + """Active RPCs reset heartbeat miss — no spurious disconnect.""" + client = await make_client(server_url) + try: + for _ in range(5): + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + + session = client.transport.sessions.get("SERVER") + assert session is not None + assert session.state == SessionState.CONNECTED + finally: + await cleanup(client) + + +# ===================================================================== +# Grace Period Tests +# ===================================================================== + + +class TestGracePeriod: + @pytest.mark.asyncio + async def test_grace_period_expiry_destroys_session(self, server_url: str): + """Session destroyed after grace period elapses.""" + client = await make_client(server_url) + try: + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + + client.transport.reconnect_on_connection_drop = False + session = client.transport.sessions.get("SERVER") + assert session is not None + await session._ws.close() + + await wait_for_disconnected(client.transport) + await wait_for_session_gone(client.transport) + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_reconnect_within_grace_preserves_session(self, server_url: str): + """Reconnecting within grace period preserves the session.""" + opts = SessionOptions( + heartbeat_interval_ms=100, + heartbeats_until_dead=2, + session_disconnect_grace_ms=5000, + connection_timeout_ms=2000, + handshake_timeout_ms=1000, + ) + client = await make_client(server_url, options=opts) + try: + 
result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + + session = client.transport.sessions.get("SERVER") + assert session is not None + await session._ws.close() + + # Auto-reconnect is on; wait for reconnection + await wait_for_connected(client.transport) + + result = await client.rpc("test", "add", {"n": 2}) + assert result["ok"] is True + finally: + await cleanup(client) + + +# ===================================================================== +# Retry Budget Tests +# ===================================================================== + + +class TestRetryBudget: + @pytest.mark.asyncio + async def test_backoff_increases_on_failures(self, server_url: str): + """Retry backoff increases after failed attempts.""" + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", # intentionally invalid + client_id=None, + server_id="INVALID", + codec=BinaryCodec(), + options=SessionOptions( + connection_timeout_ms=200, + handshake_timeout_ms=200, + session_disconnect_grace_ms=500, + ), + ) + try: + budget = transport._retry_budget + assert budget.has_budget() + initial_backoff = budget.get_backoff_ms() + + budget.consume_budget() + budget.consume_budget() + budget.consume_budget() + + higher_backoff = budget.get_backoff_ms() + assert higher_backoff > initial_backoff + finally: + await transport.close() + + @pytest.mark.asyncio + async def test_budget_restores_after_success(self, server_url: str): + """Budget restores gradually after successful connection.""" + client = await make_client(server_url) + try: + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + + budget = client.transport._retry_budget + await wait_for(lambda: budget.budget_consumed <= 1, timeout=2.0) + finally: + await cleanup(client) + + +# ===================================================================== +# Grace Period Expiry During Active Procedures +# ===================================================================== + + 
class TestGracePeriodActiveProcedures:
    """Grace period expiry while a procedure is in-flight should
    produce UNEXPECTED_DISCONNECT — mirroring disconnects.test.ts.

    Each test follows the same shape: start a procedure, force-close the
    underlying WebSocket with auto-reconnect disabled, wait for the
    session to be destroyed (grace period expiry), then assert the
    in-flight procedure observes an UNEXPECTED_DISCONNECT error result.
    """

    @pytest.mark.asyncio
    async def test_rpc_gets_disconnect_on_grace_expiry(self, server_url: str):
        """RPC buffered during disconnect gets UNEXPECTED_DISCONNECT."""
        # Built manually (not via make_client) so we can set
        # connect_on_invoke=False: the buffered RPC must NOT trigger a
        # reconnect attempt — it should sit on the dead session.
        transport = WebSocketClientTransport(
            ws_url=server_url,
            client_id=None,
            server_id="SERVER",
            codec=BinaryCodec(),
            options=SHORT_OPTIONS,
        )
        client = RiverClient(
            transport,
            server_id="SERVER",
            connect_on_invoke=False,
            eagerly_connect=True,
        )
        try:
            await wait_for_connected(transport)

            # Sanity check: the connection works before we break it.
            result = await client.rpc("test", "add", {"n": 1})
            assert result["ok"] is True

            # Drop the connection and keep it down.
            # NOTE(review): closes the session's private _ws attribute —
            # assumes this simulates an abrupt transport-level drop; confirm.
            transport.reconnect_on_connection_drop = False
            session = transport.sessions.get("SERVER")
            assert session is not None
            await session._ws.close()

            # Buffer an RPC on the disconnected session
            rpc_task = asyncio.create_task(client.rpc("test", "add", {"n": 2}))
            await asyncio.sleep(0)  # yield so task starts

            # Grace period expires → session destroyed → RPC fails
            await wait_for_session_gone(transport)

            result = await asyncio.wait_for(rpc_task, timeout=2.0)
            assert result["ok"] is False
            assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT"
        finally:
            await transport.close()

    @pytest.mark.asyncio
    async def test_stream_gets_disconnect_on_grace_expiry(self, server_url: str):
        """Active stream gets UNEXPECTED_DISCONNECT after grace period."""
        client = await make_client(server_url)
        try:
            # Open a stream and confirm one successful round-trip first.
            stream = client.stream("test", "echo", {})
            stream.req_writable.write({"msg": "hello", "ignore": False})
            done, msg = await stream.res_readable.next()
            assert not done
            assert msg["ok"] is True

            # Force-close the socket with reconnect disabled so the
            # session's grace period runs out.
            client.transport.reconnect_on_connection_drop = False
            session = client.transport.sessions.get("SERVER")
            assert session is not None
            await session._ws.close()

            await wait_for_session_gone(client.transport)

            # The readable yields an error result (not an end-of-stream
            # marker: done is still False), and the writable is closed.
            done, msg = await stream.res_readable.next()
            assert not done
            assert msg["ok"] is False
            assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT"
            assert not stream.req_writable.is_writable()
        finally:
            await cleanup(client)

    @pytest.mark.asyncio
    async def test_upload_gets_disconnect_on_grace_expiry(self, server_url: str):
        """Upload in-flight gets UNEXPECTED_DISCONNECT after grace period."""
        client = await make_client(server_url)
        try:
            upload = client.upload("uploadable", "addMultiple", {})
            upload.req_writable.write({"n": 1})

            # Ensure connection established
            await wait_for_connected(client.transport)

            client.transport.reconnect_on_connection_drop = False
            session = client.transport.sessions.get("SERVER")
            assert session is not None
            await session._ws.close()

            await wait_for_session_gone(client.transport)

            # finalize() resolves with the error result rather than the
            # upload's response once the session has been destroyed.
            result = await asyncio.wait_for(upload.finalize(), timeout=2.0)
            assert result["ok"] is False
            assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT"
        finally:
            await cleanup(client)

    @pytest.mark.asyncio
    async def test_subscription_gets_disconnect_on_grace_expiry(self, server_url: str):
        """Subscription gets UNEXPECTED_DISCONNECT after grace period."""
        client = await make_client(server_url)
        try:
            # Receive one successful published value before the drop.
            sub = client.subscribe("subscribable", "value", {})
            done, msg = await sub.res_readable.next()
            assert not done
            assert msg["ok"] is True

            client.transport.reconnect_on_connection_drop = False
            session = client.transport.sessions.get("SERVER")
            assert session is not None
            await session._ws.close()

            await wait_for_session_gone(client.transport)

            # Next read surfaces the disconnect as an error payload.
            done, msg = await sub.res_readable.next()
            assert not done
            assert msg["ok"] is False
            assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT"
        finally:
            await cleanup(client)


# =====================================================================
# Reconnect After Grace
# =====================================================================
+class TestReconnectAfterGrace: + @pytest.mark.asyncio + async def test_rpc_after_grace_expiry_creates_new_session(self, server_url: str): + """After grace period expires, a new RPC creates a fresh session.""" + client = await make_client(server_url) + try: + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + + old_session = client.transport.sessions.get("SERVER") + assert old_session is not None + old_id = old_session.id + + client.transport.reconnect_on_connection_drop = False + await old_session._ws.close() + await wait_for_session_gone(client.transport) + + # Re-enable reconnect and make a new RPC + client.transport.reconnect_on_connection_drop = True + result = await client.rpc("test", "add", {"n": 2}) + assert result["ok"] is True + + new_session = client.transport.sessions.get("SERVER") + assert new_session is not None + assert new_session.id != old_id + finally: + await cleanup(client) + + @pytest.mark.asyncio + async def test_connect_on_invoke_false_no_reconnect(self, server_url: str): + """With connect_on_invoke=False, no reconnect after grace expiry.""" + transport = WebSocketClientTransport( + ws_url=server_url, + client_id=None, + server_id="SERVER", + codec=BinaryCodec(), + options=SHORT_OPTIONS, + ) + client = RiverClient( + transport, + server_id="SERVER", + connect_on_invoke=False, + eagerly_connect=True, + ) + try: + await wait_for_connected(transport) + + result = await client.rpc("test", "add", {"n": 1}) + assert result["ok"] is True + + transport.reconnect_on_connection_drop = False + session = transport.sessions.get("SERVER") + assert session is not None + await session._ws.close() + await wait_for_session_gone(transport) + + transport.reconnect_on_connection_drop = True + + # Close transport; RPC on closed transport → UNEXPECTED_DISCONNECT + await transport.close() + + result = await client.rpc("test", "add", {"n": 2}) + assert result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" 
+ finally: + # transport already closed above + pass + + +# ===================================================================== +# Regression: stale connect-task must not block fail-fast +# ===================================================================== + + +class TestStaleConnectTask: + @pytest.mark.asyncio + async def test_done_connect_task_does_not_block_failfast(self): + """A completed (done) connect task in _connect_tasks must not + prevent the fail-fast path from firing. + + Regression: previously the check was `to not in _connect_tasks`, + so a done task kept the entry alive and calls would hang instead + of failing immediately when retries were exhausted. + """ + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", # unreachable + client_id=None, + server_id="STALE", + codec=BinaryCodec(), + options=SessionOptions( + connection_timeout_ms=100, + handshake_timeout_ms=100, + session_disconnect_grace_ms=200, + ), + ) + transport.reconnect_on_connection_drop = False + try: + # Trigger a connect that will fail + transport.connect("STALE") + await wait_for_session_gone(transport, "STALE") + + # The done task is still in _connect_tasks + assert "STALE" in transport._connect_tasks + assert transport._connect_tasks["STALE"].done() + + # Exhaust the retry budget so connect() is a no-op + transport._retry_budget.budget_consumed = ( + transport._retry_budget.attempt_budget_capacity + ) + + # RPC must fail immediately, not hang + client = RiverClient(transport, server_id="STALE", connect_on_invoke=True) + result = await asyncio.wait_for( + client.rpc("test", "add", {"n": 1}), timeout=1.0 + ) + assert result["ok"] is False + assert result["payload"]["code"] == "UNEXPECTED_DISCONNECT" + finally: + await transport.close() + + +# ===================================================================== +# Regression: grace period must not reset on each failed reconnect +# ===================================================================== + + +class 
TestGracePeriodNotResetOnRetry: + @pytest.mark.asyncio + async def test_grace_period_not_extended_by_retries(self, server_url: str): + """Repeated connection failures must not restart the grace timer. + + Regression: _on_connection_failed() unconditionally called + start_grace_period(), which cancelled and restarted the timer + on every retry, extending session lifetime far beyond + session_disconnect_grace_ms. + """ + grace_ms = 400 + transport = WebSocketClientTransport( + ws_url="ws://127.0.0.1:1", # unreachable + client_id=None, + server_id="GRACE", + codec=BinaryCodec(), + options=SessionOptions( + connection_timeout_ms=100, + handshake_timeout_ms=100, + session_disconnect_grace_ms=grace_ms, + ), + ) + try: + transport.connect("GRACE") + + # Wait for at least one connection failure to set the grace period + await wait_for( + lambda: ( + (s := transport.sessions.get("GRACE")) is not None + and s._grace_period_task is not None + ), + timeout=2.0, + ) + + session = transport.sessions["GRACE"] + original_expiry = session._grace_expiry_time + assert original_expiry is not None + + # After further retries, the expiry time must not have moved forward + await asyncio.sleep(0.2) + session2 = transport.sessions.get("GRACE") + if session2 is not None and session2._grace_expiry_time is not None: + assert session2._grace_expiry_time <= original_expiry + + # Session should be gone within grace_ms + generous margin + await wait_for_session_gone(transport, "GRACE", timeout=3.0) + finally: + await transport.close() + + +# ===================================================================== +# Regression: fail-fast must not orphan existing in-flight procedures +# ===================================================================== + + +class TestFailFastEmitsClosing: + @pytest.mark.asyncio + async def test_failfast_notifies_existing_streams(self, server_url: str): + """When fail-fast deletes a session, existing in-flight procedures + must receive the sessionStatus 'closing' 
event so they get + UNEXPECTED_DISCONNECT instead of hanging. + + Regression: _delete_session was called with emit_closing=False, + silently removing the session while older streams still waited. + """ + transport = WebSocketClientTransport( + ws_url=server_url, + client_id=None, + server_id="SERVER", + codec=BinaryCodec(), + options=SHORT_OPTIONS, + ) + client = RiverClient( + transport, + server_id="SERVER", + connect_on_invoke=True, + eagerly_connect=False, + ) + try: + # Open a stream so there's an in-flight procedure + stream = client.stream("test", "echo", {}) + stream.req_writable.write({"msg": "hello", "ignore": False}) + done, msg = await stream.res_readable.next() + assert not done + assert msg["ok"] is True + + # Drop connection, disable reconnect so session stays NO_CONNECTION + transport.reconnect_on_connection_drop = False + session = transport.sessions.get("SERVER") + assert session is not None + await session._ws.close() + await wait_for_disconnected(transport) + + # Exhaust retry budget so connect() in the next RPC is a no-op + transport._retry_budget.budget_consumed = ( + transport._retry_budget.attempt_budget_capacity + ) + + # This RPC hits the fail-fast path and deletes the session + result = await asyncio.wait_for( + client.rpc("test", "add", {"n": 1}), timeout=2.0 + ) + assert result["ok"] is False + + # The existing stream must have received UNEXPECTED_DISCONNECT + done, msg = await asyncio.wait_for(stream.res_readable.next(), timeout=2.0) + assert not done + assert msg["ok"] is False + assert msg["payload"]["code"] == "UNEXPECTED_DISCONNECT" + finally: + await transport.close() diff --git a/python-client/tests/test_utils.py b/python-client/tests/test_utils.py new file mode 100644 index 00000000..4b80047e --- /dev/null +++ b/python-client/tests/test_utils.py @@ -0,0 +1,120 @@ +"""Test utilities for River Python client tests. + +Provides event-driven waiters to replace arbitrary sleeps. 
+""" + +from __future__ import annotations + +import asyncio +from typing import Any, Callable + +from river.session import SessionState +from river.transport import WebSocketClientTransport + + +async def wait_for( + predicate: Callable[[], bool], + *, + timeout: float = 5.0, + interval: float = 0.01, +) -> None: + """Poll a predicate until it returns True, or raise TimeoutError. + + Args: + predicate: Zero-arg callable returning bool. + timeout: Max seconds to wait. + interval: Seconds between polls. + """ + deadline = asyncio.get_event_loop().time() + timeout + while not predicate(): + if asyncio.get_event_loop().time() > deadline: + raise TimeoutError(f"wait_for timed out after {timeout}s") + await asyncio.sleep(interval) + + +async def wait_for_session_state( + transport: WebSocketClientTransport, + server_id: str, + state: SessionState, + *, + timeout: float = 5.0, +) -> None: + """Wait until the session reaches the given state.""" + await wait_for( + lambda: ( + (s := transport.sessions.get(server_id)) is not None and s.state == state + ), + timeout=timeout, + ) + + +async def wait_for_connected( + transport: WebSocketClientTransport, + server_id: str = "SERVER", + *, + timeout: float = 5.0, +) -> None: + """Wait until session is CONNECTED.""" + await wait_for_session_state( + transport, server_id, SessionState.CONNECTED, timeout=timeout + ) + + +async def wait_for_session_gone( + transport: WebSocketClientTransport, + server_id: str = "SERVER", + *, + timeout: float = 5.0, +) -> None: + """Wait until the session for server_id no longer exists.""" + await wait_for( + lambda: transport.sessions.get(server_id) is None, + timeout=timeout, + ) + + +async def wait_for_disconnected( + transport: WebSocketClientTransport, + server_id: str = "SERVER", + *, + timeout: float = 5.0, +) -> None: + """Wait until session is NO_CONNECTION.""" + await wait_for_session_state( + transport, server_id, SessionState.NO_CONNECTION, timeout=timeout + ) + + +async def 
wait_for_event( + transport: WebSocketClientTransport, + event_name: str, + *, + timeout: float = 5.0, + predicate: Callable[[Any], bool] | None = None, +) -> Any: + """Wait for a specific event to be dispatched on the transport. + + Args: + transport: The transport to listen on. + event_name: Event name (e.g. "protocolError", "sessionStatus"). + timeout: Max seconds to wait. + predicate: Optional filter; if provided, only events matching + this predicate will resolve the wait. + + Returns: + The event data. + """ + fut: asyncio.Future[Any] = asyncio.get_event_loop().create_future() + + def handler(data: Any) -> None: + if fut.done(): + return + if predicate is not None and not predicate(data): + return + fut.set_result(data) + + transport.add_event_listener(event_name, handler) + try: + return await asyncio.wait_for(fut, timeout=timeout) + finally: + transport.remove_event_listener(event_name, handler) diff --git a/python-client/uv.lock b/python-client/uv.lock new file mode 100644 index 00000000..94d56717 --- /dev/null +++ b/python-client/uv.lock @@ -0,0 +1,479 @@ +version = 1 +requires-python = ">=3.10" + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865 }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484 }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631 }, + { url = 
"https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057 }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050 }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681 }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705 }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524 }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282 }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 
20745 }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571 }, + { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056 }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932 }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631 }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058 }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287 }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940 }, + { url = 
"https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887 }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692 }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471 }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923 }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572 }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077 }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876 }, + { url = 
"https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615 }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020 }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332 }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947 }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962 }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760 }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 
21529 }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015 }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540 }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105 }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906 }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622 }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029 }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374 }, + { url = 
"https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980 }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990 }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784 }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588 }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041 }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543 }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113 }, + { url = 
"https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911 }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658 }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066 }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639 }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569 }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284 }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801 
}, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769 }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642 }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612 }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200 }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973 }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619 }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029 }, + { url = 
"https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408 }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005 }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048 }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821 }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606 }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043 }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747 }, + 
{ url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341 }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073 }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661 }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069 }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670 }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598 }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261 }, + 
{ url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835 }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733 }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672 }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819 }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426 }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146 }, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f5/a2/3b68a9e769db68668b25c6108444a35f9bd163bb848c0650d516761a59c0/msgpack-1.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0051fffef5a37ca2cd16978ae4f0aef92f164df86823871b5162812bebecd8e2", size = 81318 }, + { url = "https://files.pythonhosted.org/packages/5b/e1/2b720cc341325c00be44e1ed59e7cfeae2678329fbf5aa68f5bda57fe728/msgpack-1.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a605409040f2da88676e9c9e5853b3449ba8011973616189ea5ee55ddbc5bc87", size = 83786 }, + { url = "https://files.pythonhosted.org/packages/71/e5/c2241de64bfceac456b140737812a2ab310b10538a7b34a1d393b748e095/msgpack-1.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b696e83c9f1532b4af884045ba7f3aa741a63b2bc22617293a2c6a7c645f251", size = 398240 }, + { url = "https://files.pythonhosted.org/packages/b7/09/2a06956383c0fdebaef5aa9246e2356776f12ea6f2a44bd1368abf0e46c4/msgpack-1.1.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:365c0bbe981a27d8932da71af63ef86acc59ed5c01ad929e09a0b88c6294e28a", size = 406070 }, + { url = "https://files.pythonhosted.org/packages/0e/74/2957703f0e1ef20637d6aead4fbb314330c26f39aa046b348c7edcf6ca6b/msgpack-1.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41d1a5d875680166d3ac5c38573896453bbbea7092936d2e107214daf43b1d4f", size = 393403 }, + { url = "https://files.pythonhosted.org/packages/a5/09/3bfc12aa90f77b37322fc33e7a8a7c29ba7c8edeadfa27664451801b9860/msgpack-1.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354e81bcdebaab427c3df4281187edc765d5d76bfb3a7c125af9da7a27e8458f", size = 398947 }, + { url = "https://files.pythonhosted.org/packages/4b/4f/05fcebd3b4977cb3d840f7ef6b77c51f8582086de5e642f3fefee35c86fc/msgpack-1.1.2-cp310-cp310-win32.whl", hash = "sha256:e64c8d2f5e5d5fda7b842f55dec6133260ea8f53c4257d64494c534f306bf7a9", size = 64769 }, + { url = 
"https://files.pythonhosted.org/packages/d0/3e/b4547e3a34210956382eed1c85935fff7e0f9b98be3106b3745d7dec9c5e/msgpack-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:db6192777d943bdaaafb6ba66d44bf65aa0e9c5616fa1d2da9bb08828c6b39aa", size = 71293 }, + { url = "https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271 }, + { url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914 }, + { url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962 }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183 }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454 }, + { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341 }, + { url = 
"https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747 }, + { url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633 }, + { url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755 }, + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939 }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064 }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131 }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556 }, + { url = 
"https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920 }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013 }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096 }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708 }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119 }, + { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212 }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315 }, + { url = 
"https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721 }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657 }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668 }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040 }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037 }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631 }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118 }, + { url = 
"https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127 }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981 }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885 }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658 }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290 }, + { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234 }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391 }, + { url = 
"https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787 }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453 }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264 }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076 }, + { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242 }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509 }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957 }, + { url = 
"https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910 }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197 }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772 }, + { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868 }, +] + +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356 }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366 }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217 }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = 
"iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801 }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075 }, +] + +[[package]] +name = "river-client" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "jinja2" }, + { name = "msgpack" }, + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "jinja2", specifier = ">=3.0" }, + { name = "msgpack", specifier = ">=1.0" }, + { name = "opentelemetry-api", specifier = ">=1.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=8.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.23" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.4" }, + { name = "typing-extensions", specifier = ">=4.0" }, + { name = "websockets", specifier = ">=12.0" }, +] + +[[package]] +name = "ruff" +version = "0.15.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333 }, + { url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356 }, + { url = "https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434 }, + { url = 
"https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456 }, + { url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772 }, + { url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051 }, + { url = "https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494 }, + { url = "https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221 }, + { url = "https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459 }, + { url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366 }, + { url 
= "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887 }, + { url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939 }, + { url = "https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471 }, + { url = "https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382 }, + { url = "https://files.pythonhosted.org/packages/1b/fa/2ef715a1cd329ef47c1a050e10dee91a9054b7ce2fcfdd6a06d139afb7ec/ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22", size = 10506664 }, + { url = "https://files.pythonhosted.org/packages/d0/a8/c688ef7e29983976820d18710f955751d9f4d4eb69df658af3d006e2ba3e/ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f", size = 11651048 }, + { url = "https://files.pythonhosted.org/packages/3e/0a/9e1be9035b37448ce2e68c978f0591da94389ade5a5abafa4cf99985d1b2/ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453", size = 10966776 }, +] + +[[package]] +name = "tomli" +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663 }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469 }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039 }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007 }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875 }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271 }, + { url = 
"https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770 }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626 }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842 }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894 }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053 }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481 }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720 }, + { url = 
"https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014 }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820 }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712 }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296 }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553 }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915 }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038 }, + { url = 
"https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245 }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335 }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962 }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396 }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530 }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227 }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748 }, + { url = 
"https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725 }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901 }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375 }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639 }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897 }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697 }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567 }, + { url = 
"https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556 }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014 }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339 }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490 }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398 }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515 }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806 }, + { url = 
"https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340 }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106 }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504 }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561 }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477 }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry 
= "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = "sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/74/221f58decd852f4b59cc3354cccaf87e8ef695fede361d03dc9a7396573b/websockets-16.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:04cdd5d2d1dacbad0a7bf36ccbcd3ccd5a30ee188f2560b7a62a30d14107b31a", size = 177343 }, + { url = "https://files.pythonhosted.org/packages/19/0f/22ef6107ee52ab7f0b710d55d36f5a5d3ef19e8a205541a6d7ffa7994e5a/websockets-16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ff32bb86522a9e5e31439a58addbb0166f0204d64066fb955265c4e214160f0", size = 175021 }, + { url = "https://files.pythonhosted.org/packages/10/40/904a4cb30d9b61c0e278899bf36342e9b0208eb3c470324a9ecbaac2a30f/websockets-16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:583b7c42688636f930688d712885cf1531326ee05effd982028212ccc13e5957", size = 175320 }, + { url = "https://files.pythonhosted.org/packages/9d/2f/4b3ca7e106bc608744b1cdae041e005e446124bebb037b18799c2d356864/websockets-16.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7d837379b647c0c4c2355c2499723f82f1635fd2c26510e1f587d89bc2199e72", size = 183815 }, + { url = "https://files.pythonhosted.org/packages/86/26/d40eaa2a46d4302becec8d15b0fc5e45bdde05191e7628405a19cf491ccd/websockets-16.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:df57afc692e517a85e65b72e165356ed1df12386ecb879ad5693be08fac65dde", size = 185054 }, + { url = "https://files.pythonhosted.org/packages/b0/ba/6500a0efc94f7373ee8fefa8c271acdfd4dca8bd49a90d4be7ccabfc397e/websockets-16.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2b9f1e0d69bc60a4a87349d50c09a037a2607918746f07de04df9e43252c77a3", size = 
184565 }, + { url = "https://files.pythonhosted.org/packages/04/b4/96bf2cee7c8d8102389374a2616200574f5f01128d1082f44102140344cc/websockets-16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:335c23addf3d5e6a8633f9f8eda77efad001671e80b95c491dd0924587ece0b3", size = 183848 }, + { url = "https://files.pythonhosted.org/packages/02/8e/81f40fb00fd125357814e8c3025738fc4ffc3da4b6b4a4472a82ba304b41/websockets-16.0-cp310-cp310-win32.whl", hash = "sha256:37b31c1623c6605e4c00d466c9d633f9b812ea430c11c8a278774a1fde1acfa9", size = 178249 }, + { url = "https://files.pythonhosted.org/packages/b4/5f/7e40efe8df57db9b91c88a43690ac66f7b7aa73a11aa6a66b927e44f26fa/websockets-16.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e1dab317b6e77424356e11e99a432b7cb2f3ec8c5ab4dabbcee6add48f72b35", size = 178685 }, + { url = "https://files.pythonhosted.org/packages/f2/db/de907251b4ff46ae804ad0409809504153b3f30984daf82a1d84a9875830/websockets-16.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:31a52addea25187bde0797a97d6fc3d2f92b6f72a9370792d65a6e84615ac8a8", size = 177340 }, + { url = "https://files.pythonhosted.org/packages/f3/fa/abe89019d8d8815c8781e90d697dec52523fb8ebe308bf11664e8de1877e/websockets-16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:417b28978cdccab24f46400586d128366313e8a96312e4b9362a4af504f3bbad", size = 175022 }, + { url = "https://files.pythonhosted.org/packages/58/5d/88ea17ed1ded2079358b40d31d48abe90a73c9e5819dbcde1606e991e2ad/websockets-16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af80d74d4edfa3cb9ed973a0a5ba2b2a549371f8a741e0800cb07becdd20f23d", size = 175319 }, + { url = "https://files.pythonhosted.org/packages/d2/ae/0ee92b33087a33632f37a635e11e1d99d429d3d323329675a6022312aac2/websockets-16.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:08d7af67b64d29823fed316505a89b86705f2b7981c07848fb5e3ea3020c1abe", size = 184631 }, + { url = 
"https://files.pythonhosted.org/packages/c8/c5/27178df583b6c5b31b29f526ba2da5e2f864ecc79c99dae630a85d68c304/websockets-16.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7be95cfb0a4dae143eaed2bcba8ac23f4892d8971311f1b06f3c6b78952ee70b", size = 185870 }, + { url = "https://files.pythonhosted.org/packages/87/05/536652aa84ddc1c018dbb7e2c4cbcd0db884580bf8e95aece7593fde526f/websockets-16.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d6297ce39ce5c2e6feb13c1a996a2ded3b6832155fcfc920265c76f24c7cceb5", size = 185361 }, + { url = "https://files.pythonhosted.org/packages/6d/e2/d5332c90da12b1e01f06fb1b85c50cfc489783076547415bf9f0a659ec19/websockets-16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c1b30e4f497b0b354057f3467f56244c603a79c0d1dafce1d16c283c25f6e64", size = 184615 }, + { url = "https://files.pythonhosted.org/packages/77/fb/d3f9576691cae9253b51555f841bc6600bf0a983a461c79500ace5a5b364/websockets-16.0-cp311-cp311-win32.whl", hash = "sha256:5f451484aeb5cafee1ccf789b1b66f535409d038c56966d6101740c1614b86c6", size = 178246 }, + { url = "https://files.pythonhosted.org/packages/54/67/eaff76b3dbaf18dcddabc3b8c1dba50b483761cccff67793897945b37408/websockets-16.0-cp311-cp311-win_amd64.whl", hash = "sha256:8d7f0659570eefb578dacde98e24fb60af35350193e4f56e11190787bee77dac", size = 178684 }, + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365 }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038 }, + { url = 
"https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328 }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915 }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152 }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583 }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880 }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261 }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693 }, + { url = 
"https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364 }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039 }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323 }, + { url = "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975 }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203 }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653 }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920 }, + { url = 
"https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255 }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689 }, + { url = "https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406 }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085 }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328 }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044 }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279 }, + { url = 
"https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711 }, + { url = "https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982 }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915 }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381 }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737 }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268 }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486 }, + { url = 
"https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331 }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501 }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062 }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356 }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085 }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531 }, + { url = "https://files.pythonhosted.org/packages/72/07/c98a68571dcf256e74f1f816b8cc5eae6eb2d3d5cfa44d37f801619d9166/websockets-16.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:349f83cd6c9a415428ee1005cadb5c2c56f4389bc06a9af16103c3bc3dcc8b7d", size = 174947 }, + { url = 
"https://files.pythonhosted.org/packages/7e/52/93e166a81e0305b33fe416338be92ae863563fe7bce446b0f687b9df5aea/websockets-16.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:4a1aba3340a8dca8db6eb5a7986157f52eb9e436b74813764241981ca4888f03", size = 175260 }, + { url = "https://files.pythonhosted.org/packages/56/0c/2dbf513bafd24889d33de2ff0368190a0e69f37bcfa19009ef819fe4d507/websockets-16.0-pp311-pypy311_pp73-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f4a32d1bd841d4bcbffdcb3d2ce50c09c3909fbead375ab28d0181af89fd04da", size = 176071 }, + { url = "https://files.pythonhosted.org/packages/a5/8f/aea9c71cc92bf9b6cc0f7f70df8f0b420636b6c96ef4feee1e16f80f75dd/websockets-16.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0298d07ee155e2e9fda5be8a9042200dd2e3bb0b8a38482156576f863a9d457c", size = 176968 }, + { url = "https://files.pythonhosted.org/packages/9a/3f/f70e03f40ffc9a30d817eef7da1be72ee4956ba8d7255c399a01b135902a/websockets-16.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:a653aea902e0324b52f1613332ddf50b00c06fdaf7e92624fbf8c77c78fa5767", size = 178735 }, + { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598 }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = 
"sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276 }, +]