diff --git a/extended/__init__.py b/extended/__init__.py new file mode 100644 index 0000000..9625633 --- /dev/null +++ b/extended/__init__.py @@ -0,0 +1,115 @@ +""" +Extended Exchange SDK - Hyperliquid-compatible interface. + +This SDK provides a Hyperliquid/Pacifica-compatible interface for Extended Exchange, +allowing seamless integration with existing trading engines. + +Usage: + # Using Client class + from extended import Client + + client = Client( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True, + ) + + # Info operations + state = client.info.user_state() + orders = client.info.open_orders() + + # Exchange operations + client.exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + client.exchange.cancel("BTC", oid=12345) + + # Using setup() function (Hyperliquid-style) + from extended import setup + + address, info, exchange = setup( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True + ) + + state = info.user_state() + exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + +Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. + +Key Differences from Hyperliquid: + - Credentials: Extended requires 4 credentials from external onboarding + - No address param: Can't query other users (Extended requires auth) + - Market names: Extended uses "BTC-USD" internally (auto-converted from "BTC") + - Bulk orders: Not atomic on Extended + - Isolated margin: Not supported on Extended + - Market orders: Simulated as IOC limit orders (may not fill completely) +""" + +__version__ = "0.1.0" + +# Main clients - NATIVE SYNC ONLY +from extended.client import Client + +# Setup function (Hyperliquid-style) - NATIVE SYNC ONLY +from extended.setup import setup + +# API classes (for type hints) - NATIVE SYNC ONLY +from extended.api import InfoAPI, ExchangeAPI + +# Configuration +from extended.config_sync import TESTNET_CONFIG, MAINNET_CONFIG, SimpleSyncConfig as EndpointConfig + +# Exceptions +from extended.exceptions_sync import ( + ExtendedError, + ExtendedAPIError, + ExtendedAuthError, + ExtendedRateLimitError, + ExtendedValidationError, + ExtendedNotFoundError, +) + +# Simple types to avoid dependencies +class Side: + BUY = "BUY" + SELL = "SELL" + +class TimeInForce: + GTC = "GTC" + IOC = "IOC" + ALO = "ALO" + +OrderTypeSpec = dict +BuilderInfo = dict + +__all__ = [ + # Version + "__version__", + # Main clients - NATIVE SYNC ONLY + "Client", + # API classes - NATIVE SYNC ONLY + "InfoAPI", + "ExchangeAPI", + # Config + "TESTNET_CONFIG", + "MAINNET_CONFIG", + "EndpointConfig", + # Exceptions + "ExtendedError", + "ExtendedAPIError", + "ExtendedAuthError", + "ExtendedRateLimitError", + "ExtendedValidationError", + "ExtendedNotFoundError", + # Types + "Side", + "TimeInForce", + "OrderTypeSpec", + "BuilderInfo", +] diff --git a/extended/api/__init__.py b/extended/api/__init__.py new file mode 100644 index 0000000..d944bc1 --- /dev/null +++ b/extended/api/__init__.py @@ -0,0 +1,12 @@ +""" +API modules for Extended Exchange SDK. 
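+
+A sketch, assuming auth and config were already built from SimpleSyncAuth
+and SimpleSyncConfig (see extended/__init__.py for the credential fields):
+
+    from extended.api import InfoAPI, ExchangeAPI
+
+    info = InfoAPI(auth, config)          # read-only endpoints
+    exchange = ExchangeAPI(auth, config)  # trading endpoints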
+""" + +# Only import sync APIs - async imports removed to eliminate dependencies +from extended.api.info import InfoAPI +from extended.api.exchange import ExchangeAPI + +__all__ = [ + "InfoAPI", + "ExchangeAPI", +] diff --git a/extended/api/base.py b/extended/api/base.py new file mode 100644 index 0000000..ff8c0b3 --- /dev/null +++ b/extended/api/base.py @@ -0,0 +1,39 @@ +""" +Base sync API class for Extended Exchange SDK. + +Provides common functionality for sync API classes using native sync implementation. +Uses native sync implementation instead of wrapper approach. +""" + +from typing import Any, TypeVar + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig + +T = TypeVar("T") + + +class BaseSyncAPI(BaseNativeSyncClient): + """ + Base class for sync API implementations. + + Uses native sync HTTP operations instead of wrapping async operations. + MIRRORS Pacifica's BaseAPIClient approach exactly. + """ + + def __init__(self, auth: SimpleSyncAuth, config: SimpleSyncConfig): + """ + Initialize the base API. + + Args: + auth: SimpleSyncAuth instance with credentials + config: SimpleSyncConfig configuration + """ + # Use native sync client directly + super().__init__(auth, config) + + def close(self): + """Close the API and release resources.""" + # No async cleanup needed for native sync implementation + pass \ No newline at end of file diff --git a/extended/api/base_async.py b/extended/api/base_async.py new file mode 100644 index 0000000..22c4c17 --- /dev/null +++ b/extended/api/base_async.py @@ -0,0 +1,63 @@ +""" +Base async API class for Extended Exchange SDK. + +Provides common functionality for async API classes. +""" + +from typing import Any, Awaitable, Callable, List, TypeVar + +from x10.perpetual.configuration import EndpointConfig +from x10.perpetual.trading_client import PerpetualTradingClient + +from extended.auth import ExtendedAuth +from extended.utils.async_helpers import thread_safe_gather + +T = TypeVar("T") + + +class BaseAsyncAPI: + """ + Base class for async API implementations. + + Provides access to the Extended trading client and common utilities. + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the base API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + self._auth = auth + self._config = config + self._client: PerpetualTradingClient = auth.get_trading_client() + + @property + def trading_client(self) -> PerpetualTradingClient: + """Get the underlying trading client.""" + return self._client + + async def execute_parallel( + self, + tasks: List[Callable[[], Awaitable[T]]], + ) -> List[T]: + """ + Execute multiple async tasks in parallel. + + Uses thread-safe gather to prevent "Future attached to + different loop" errors in ThreadPoolExecutor contexts. + + Args: + tasks: List of async callables + + Returns: + List of results from all tasks + """ + coroutines = [task() for task in tasks] + return await thread_safe_gather(*coroutines) + + async def close(self): + """Close the API and release resources.""" + await self._auth.close() diff --git a/extended/api/base_native_sync.py b/extended/api/base_native_sync.py new file mode 100644 index 0000000..38c3605 --- /dev/null +++ b/extended/api/base_native_sync.py @@ -0,0 +1,156 @@ +""" +Base native sync API client for Extended Exchange SDK. + +MIRRORS Pacifica's BaseAPIClient architecture exactly. 
+Uses requests.Session() for native synchronous operation. +""" + +import requests +import time +import logging +from typing import Dict, Optional, Any, List +from urllib.parse import urljoin + +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig + +# Simple exception class to avoid async dependencies +class ExtendedAPIError(Exception): + def __init__(self, status_code: int, message: str, data=None): + self.status_code = status_code + self.message = message + self.data = data + super().__init__(message) + +logger = logging.getLogger(__name__) + + +class BaseNativeSyncClient: + """ + Base native sync HTTP client - EXACT MIRROR of Pacifica's BaseAPIClient. + + Uses requests.Session() for native sync HTTP operations, + eliminating all async/event loop dependencies. + """ + + def __init__( + self, + auth: Optional[SimpleSyncAuth] = None, + config: Optional[SimpleSyncConfig] = None, + timeout: int = 30 + ): + """ + Initialize base API client. + + Args: + auth: SimpleSyncAuth instance with credentials + config: SimpleSyncConfig configuration + timeout: Request timeout in seconds + """ + self.auth = auth + self.config = config + self.timeout = timeout + + # Native sync session - SAME AS PACIFICA + self.session = requests.Session() + self.session.headers.update({ + "Content-Type": "application/json", + "Accept": "application/json" + }) + + def _request( + self, + method: str, + endpoint: str, + params: Optional[Dict] = None, + data: Optional[Dict] = None, + authenticated: bool = False, + additional_headers: Optional[Dict] = None + ) -> Dict[str, Any]: + """ + Make an API request - EXACT COPY of Pacifica's _request method. + + Args: + method: HTTP method + endpoint: API endpoint + params: Query parameters + data: Request body + authenticated: Whether request needs authentication + additional_headers: Additional headers + + Returns: + API response data + + Raises: + ExtendedAPIError: On API errors + """ + # Fix URL construction: ensure proper path joining + if endpoint.startswith('/'): + url = self.config.api_base_url + endpoint + else: + url = f"{self.config.api_base_url}/{endpoint}" + + headers = {} + if authenticated and self.auth: + headers["X-Api-Key"] = self.auth.api_key + + if additional_headers: + headers.update(additional_headers) + + logger.debug(f"{method} {url} params={params}") + + try: + response = self.session.request( + method, + url, + params=params, + json=data, + headers=headers, + timeout=self.timeout + ) + + # Handle HTTP errors + if response.status_code >= 400: + try: + error_data = response.json() + message = error_data.get("msg", response.text) + except: + message = response.text + raise ExtendedAPIError(response.status_code, message) + + result = response.json() + + # Check for API-level errors in response + if not result.get("success", True): + raise ExtendedAPIError( + response.status_code, + result.get("msg", "Request failed"), + result + ) + + return result + + except requests.RequestException as e: + logger.error(f"Request failed: {e}") + raise ExtendedAPIError(500, str(e)) + + def get(self, endpoint: str, params: Optional[Dict] = None, authenticated: bool = False) -> Dict: + """GET request""" + return self._request("GET", endpoint, params=params, authenticated=authenticated) + + def post(self, endpoint: str, data: Optional[Dict] = None, authenticated: bool = True, headers: Optional[Dict] = None) -> Dict: + """POST request with optional headers""" + return self._request("POST", endpoint, data=data, 
authenticated=authenticated, additional_headers=headers) + + def delete(self, endpoint: str, params: Optional[Dict] = None, authenticated: bool = True) -> Dict: + """DELETE request""" + return self._request("DELETE", endpoint, params=params, authenticated=authenticated) + + def patch(self, endpoint: str, data: Optional[Dict] = None, authenticated: bool = True) -> Dict: + """PATCH request""" + return self._request("PATCH", endpoint, data=data, authenticated=authenticated) + + def close(self): + """Close the session.""" + if self.session: + self.session.close() \ No newline at end of file diff --git a/extended/api/base_new_sync.py b/extended/api/base_new_sync.py new file mode 100644 index 0000000..e8fadad --- /dev/null +++ b/extended/api/base_new_sync.py @@ -0,0 +1,40 @@ +""" +Base sync API class for Extended Exchange SDK. + +Provides common functionality for sync API classes using native sync implementation. +REPLACES the problematic run_sync() wrapper approach. +""" + +from typing import Any, TypeVar + +from x10.perpetual.configuration import EndpointConfig + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.auth import ExtendedAuth + +T = TypeVar("T") + + +class BaseSyncAPI(BaseNativeSyncClient): + """ + Base class for sync API implementations. + + Uses native sync HTTP operations instead of wrapping async operations. + MIRRORS Pacifica's BaseAPIClient approach exactly. + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the base API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + # Use native sync client directly + super().__init__(auth, config) + + def close(self): + """Close the API and release resources.""" + # No async cleanup needed for native sync implementation + pass \ No newline at end of file diff --git a/extended/api/base_old_async_wrapper.py b/extended/api/base_old_async_wrapper.py new file mode 100644 index 0000000..0b44f59 --- /dev/null +++ b/extended/api/base_old_async_wrapper.py @@ -0,0 +1,37 @@ +""" +Base sync API class for Extended Exchange SDK. + +Provides common functionality for sync API classes by wrapping async operations. +""" + +from typing import Any, TypeVar + +from x10.perpetual.configuration import EndpointConfig + +from extended.auth import ExtendedAuth +from extended.utils.helpers import run_sync + +T = TypeVar("T") + + +class BaseSyncAPI: + """ + Base class for sync API implementations. + + Wraps async API operations to provide a synchronous interface. + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the base API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + self._auth = auth + self._config = config + + def close(self): + """Close the API and release resources.""" + run_sync(self._auth.close()) diff --git a/extended/api/exchange.py b/extended/api/exchange.py new file mode 100644 index 0000000..67bce4a --- /dev/null +++ b/extended/api/exchange.py @@ -0,0 +1,44 @@ +""" +Sync Exchange API for Extended Exchange SDK. + +Provides trading operations matching Hyperliquid's Exchange class interface. +Uses native sync implementation instead of async wrapper. 
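+
+Note that market orders are not native here: market_open/market_close
+simulate them as IOC limit orders priced from the orderbook, so fills
+may be partial.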
+""" + +from typing import Any, Dict, List, Optional + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.api.exchange_native_sync import NativeSyncExchangeAPI +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig + + +class ExchangeAPI(NativeSyncExchangeAPI): + """ + Extended Exchange trading API with Hyperliquid-compatible interface. + + Native synchronous implementation - pure sync. + + Handles order placement, cancellation, and account management. + + Example: + exchange = ExchangeAPI(auth, config) + result = exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + exchange.cancel("BTC", oid=12345) + """ + + def __init__(self, auth: SimpleSyncAuth, config: SimpleSyncConfig): + """ + Initialize the sync Exchange API. + + Args: + auth: SimpleSyncAuth instance with credentials + config: SimpleSyncConfig configuration + """ + # Use native sync implementation directly + super().__init__(auth, config) + + def close(self): + """Close the API and release resources.""" + # No async cleanup needed for native sync implementation + pass \ No newline at end of file diff --git a/extended/api/exchange_async.py b/extended/api/exchange_async.py new file mode 100644 index 0000000..9591c17 --- /dev/null +++ b/extended/api/exchange_async.py @@ -0,0 +1,475 @@ +""" +Async Exchange API for Extended Exchange SDK. + +Provides trading operations matching Hyperliquid's Exchange class interface. +""" + +import warnings +from datetime import datetime, timedelta +from decimal import Decimal +from typing import Any, Dict, List, Optional + +from x10.perpetual.configuration import EndpointConfig +from x10.perpetual.orders import OrderSide, TimeInForce as X10TimeInForce + +from extended.api.base_async import BaseAsyncAPI +from extended.auth import ExtendedAuth +from extended.exceptions import ExtendedAPIError, ExtendedValidationError +from extended.transformers import OrderTransformer +from extended.utils.async_helpers import thread_safe_gather +from extended.utils.constants import ( + DEFAULT_SLIPPAGE, + MARKET_ORDER_PRICE_CAP, + MARKET_ORDER_PRICE_FLOOR, + SIDE_MAPPING, +) +from extended.utils.helpers import normalize_market_name, parse_builder, parse_order_type + + +class AsyncExchangeAPI(BaseAsyncAPI): + """ + Extended Exchange trading API with Hyperliquid-compatible interface. + + Handles order placement, cancellation, and account management. + + Example: + async_exchange = AsyncExchangeAPI(auth, config) + result = await async_exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + await async_exchange.cancel("BTC", oid=12345) + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the async Exchange API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + super().__init__(auth, config) + + async def order( + self, + name: str, + is_buy: bool, + sz: float, + limit_px: float, + order_type: Optional[Dict[str, Any]] = None, + reduce_only: bool = False, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Place a limit order. 
+ + Args: + name: Market name (e.g., "BTC" or "BTC-USD") + is_buy: True for buy, False for sell + sz: Size in base asset + limit_px: Limit price + order_type: {"limit": {"tif": "Gtc"}} or {"limit": {"tif": "Ioc"}} + or {"limit": {"tif": "Alo"}} (post-only) + reduce_only: Only reduce position + cloid: Client order ID (maps to external_id) + builder: {"b": builder_id, "f": fee_bps_tenths} + + Returns: + Hyperliquid-format response: + {"status": "ok", "response": {"type": "order", "data": {"statuses": [...]}}} + """ + if order_type is None: + order_type = {"limit": {"tif": "Gtc"}} + + market_name = normalize_market_name(name) + side = OrderSide.BUY if is_buy else OrderSide.SELL + tif, post_only = parse_order_type(order_type) + builder_id, builder_fee = parse_builder(builder) + + try: + response = await self._client.place_order( + market_name=market_name, + amount_of_synthetic=Decimal(str(sz)), + price=Decimal(str(limit_px)), + side=side, + post_only=post_only, + time_in_force=tif, + external_id=cloid, + builder_id=builder_id, + builder_fee=builder_fee, + reduce_only=reduce_only, + ) + + return OrderTransformer.transform_order_response(response.data) + + except Exception as e: + return OrderTransformer.transform_error_response(str(e)) + + async def bulk_orders( + self, + order_requests: List[Dict[str, Any]], + builder: Optional[Dict[str, Any]] = None, + grouping: str = "na", + ) -> Dict[str, Any]: + """ + Place multiple orders in parallel. + + WARNING: Unlike Hyperliquid, Extended does not support atomic + bulk orders. Orders are sent in parallel and may partially fail. + + Args: + order_requests: List of order dicts with keys: + - coin, is_buy, sz, limit_px, order_type, reduce_only, cloid + builder: Builder info applied to all orders + grouping: Ignored (no native support) + + Returns: + Combined results from all orders + """ + async def place_single(request: Dict[str, Any]) -> Dict[str, Any]: + try: + result = await self.order( + name=request["coin"], + is_buy=request["is_buy"], + sz=request["sz"], + limit_px=request["limit_px"], + order_type=request.get("order_type", {"limit": {"tif": "Gtc"}}), + reduce_only=request.get("reduce_only", False), + cloid=request.get("cloid"), + builder=builder or request.get("builder"), + ) + + # Extract the placed order data from the response + if result.get("status") == "ok": + statuses = result.get("response", {}).get("data", {}).get("statuses", []) + if statuses and "resting" in statuses[0]: + return { + "status": "ok", + "data": { + "id": statuses[0]["resting"]["oid"], + "external_id": statuses[0]["resting"]["cloid"], + } + } + return {"status": "error", "error": result.get("response", "Unknown error")} + + except Exception as e: + return {"status": "error", "error": str(e)} + + # Execute all orders in parallel + results = await thread_safe_gather( + *[place_single(req) for req in order_requests], + return_exceptions=True, + ) + + # Process results + processed_results = [] + for result in results: + if isinstance(result, Exception): + processed_results.append({"status": "error", "error": str(result)}) + else: + processed_results.append(result) + + return OrderTransformer.transform_bulk_orders_response(processed_results) + + async def cancel( + self, + name: str, + oid: Optional[int] = None, + cloid: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Cancel an order by oid or cloid. 
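+
+        Example (sketch):
+            await async_exchange.cancel("BTC", oid=12345)           # by internal id
+            await async_exchange.cancel("BTC", cloid="my-order-1")  # by external_id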
+ + Args: + name: Market name (required for Hyperliquid compat, may be ignored) + oid: Internal order ID + cloid: Client order ID (external_id) + + Returns: + Hyperliquid-format response: + {"status": "ok", "response": {"type": "cancel", "data": {"statuses": [...]}}} + """ + if oid is None and cloid is None: + raise ExtendedValidationError("Either oid or cloid must be provided") + + try: + if oid is not None: + await self._client.orders.cancel_order(order_id=oid) + else: + await self._client.orders.cancel_order_by_external_id( + order_external_id=cloid # type: ignore + ) + + return OrderTransformer.transform_cancel_response(success=True, order_id=oid) + + except Exception as e: + return OrderTransformer.transform_error_response(str(e)) + + async def cancel_by_cloid(self, name: str, cloid: str) -> Dict[str, Any]: + """ + Cancel order by client order ID. + + Args: + name: Market name + cloid: Client order ID + + Returns: + Cancel response in Hyperliquid format + """ + return await self.cancel(name, cloid=cloid) + + async def bulk_cancel( + self, + cancel_requests: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """ + Cancel multiple orders. + + Args: + cancel_requests: List of {"coin": str, "oid": int} + or {"coin": str, "cloid": str} + + Returns: + Combined cancel results + """ + # Group by oids and cloids for mass cancel + oids = [] + cloids = [] + + for req in cancel_requests: + if "oid" in req and req["oid"] is not None: + oids.append(req["oid"]) + elif "cloid" in req and req["cloid"] is not None: + cloids.append(req["cloid"]) + + try: + await self._client.orders.mass_cancel( + order_ids=oids if oids else None, + external_order_ids=cloids if cloids else None, + ) + + return { + "status": "ok", + "response": { + "type": "cancel", + "data": { + "statuses": ["success"] * len(cancel_requests) + }, + }, + } + + except Exception as e: + return OrderTransformer.transform_error_response(str(e)) + + async def update_leverage( + self, + leverage: int, + name: str, + is_cross: bool = True, + ) -> Dict[str, Any]: + """ + Update leverage for a market. + + Args: + leverage: Target leverage (1-50) + name: Market name + is_cross: Ignored (Extended only supports cross margin) + + Returns: + Hyperliquid-format response: + {"status": "ok", "response": {"type": "leverage"}} + """ + if not is_cross: + warnings.warn( + "Extended Exchange only supports cross margin. " + "is_cross=False will be ignored.", + UserWarning, + ) + + market_name = normalize_market_name(name) + + try: + await self._client.account.update_leverage( + market_name=market_name, + leverage=Decimal(leverage), + ) + + return OrderTransformer.transform_leverage_response() + + except Exception as e: + return OrderTransformer.transform_error_response(str(e)) + + async def _calculate_market_order_price( + self, + name: str, + is_buy: bool, + slippage: float, + ) -> Decimal: + """ + Calculate limit price for simulated market order. 
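+
+        Worked example (illustrative numbers): for a buy with slippage=0.05,
+        best_ask=100 and mark_price=99, the target 100 * 1.05 = 105 exceeds
+        the cap 99 * 1.05 = 103.95, so 103.95 is used and then rounded up
+        to the market's price precision.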
+ + Extended constraints: + - Buy: price <= mark_price * 1.05 + - Sell: price >= mark_price * 0.95 + + Args: + name: Market name + is_buy: True for buy + slippage: Max slippage (e.g., 0.05 for 5%) + + Returns: + Calculated limit price (rounded to market precision) + """ + from decimal import ROUND_CEILING, ROUND_FLOOR + + market_name = normalize_market_name(name) + + # Get orderbook, market stats, and market config in parallel + orderbook_task = self._client.markets_info.get_orderbook_snapshot( + market_name=market_name + ) + stats_task = self._client.markets_info.get_market_statistics( + market_name=market_name + ) + markets_task = self._client.markets_info.get_markets_dict() + + orderbook_response, stats_response, markets_dict = await thread_safe_gather( + orderbook_task, stats_task, markets_task + ) + + orderbook = orderbook_response.data + stats = stats_response.data + mark_price = stats.mark_price + market = markets_dict[market_name] + + if is_buy: + # Use best ask with slippage, capped at mark * 1.05 + best_ask = ( + orderbook.ask[0].price + if orderbook.ask + else mark_price + ) + target_price = best_ask * Decimal(1 + slippage) + max_price = mark_price * Decimal(str(MARKET_ORDER_PRICE_CAP)) + price = min(target_price, max_price) + # Round up for buys to ensure fill + return market.trading_config.round_price(price, ROUND_CEILING) + else: + # Use best bid with slippage, floored at mark * 0.95 + best_bid = ( + orderbook.bid[0].price + if orderbook.bid + else mark_price + ) + target_price = best_bid * Decimal(1 - slippage) + min_price = mark_price * Decimal(str(MARKET_ORDER_PRICE_FLOOR)) + price = max(target_price, min_price) + # Round down for sells to ensure fill + return market.trading_config.round_price(price, ROUND_FLOOR) + + async def market_open( + self, + name: str, + is_buy: bool, + sz: float, + px: Optional[float] = None, + slippage: float = DEFAULT_SLIPPAGE, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Open a position with a market order. + + Note: Extended simulates market orders as IOC limit orders. + + Args: + name: Market name + is_buy: True for long, False for short + sz: Size in base asset + px: Optional price hint (uses orderbook if not provided) + slippage: Max slippage (default 5%) + cloid: Client order ID + builder: Builder info + + Returns: + Order response in Hyperliquid format + """ + if px is not None: + limit_price = Decimal(str(px)) + else: + limit_price = await self._calculate_market_order_price( + name, is_buy, slippage + ) + + return await self.order( + name=name, + is_buy=is_buy, + sz=sz, + limit_px=float(limit_price), + order_type={"limit": {"tif": "Ioc"}}, + reduce_only=False, + cloid=cloid, + builder=builder, + ) + + async def market_close( + self, + coin: str, + sz: Optional[float] = None, + px: Optional[float] = None, + slippage: float = DEFAULT_SLIPPAGE, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Close a position with a market order. 
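+
+        Example (sketch):
+            await async_exchange.market_close("BTC")            # close whole position
+            await async_exchange.market_close("BTC", sz=0.005)  # partial close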
+ + Args: + coin: Market name + sz: Size to close (None = close entire position) + px: Optional price hint + slippage: Max slippage + cloid: Client order ID + builder: Builder info + + Returns: + Order response in Hyperliquid format + """ + market_name = normalize_market_name(coin) + + # Get current position to determine size and side + positions_response = await self._client.account.get_positions( + market_names=[market_name] + ) + + positions = positions_response.data or [] + if not positions: + return OrderTransformer.transform_error_response( + f"No open position found for {coin}" + ) + + position = positions[0] + + # Determine size to close + close_sz = float(sz) if sz is not None else float(position.size) + + # Close is opposite side (side can be str or PositionSide enum) + side = position.side.value if hasattr(position.side, 'value') else position.side + is_buy = side == "SHORT" + + if px is not None: + limit_price = Decimal(str(px)) + else: + limit_price = await self._calculate_market_order_price( + coin, is_buy, slippage + ) + + return await self.order( + name=coin, + is_buy=is_buy, + sz=close_sz, + limit_px=float(limit_price), + order_type={"limit": {"tif": "Ioc"}}, + reduce_only=True, + cloid=cloid, + builder=builder, + ) diff --git a/extended/api/exchange_native_sync.py b/extended/api/exchange_native_sync.py new file mode 100644 index 0000000..76db9e7 --- /dev/null +++ b/extended/api/exchange_native_sync.py @@ -0,0 +1,584 @@ +""" +Native Sync Exchange API for Extended Exchange SDK. + +Provides trading operations matching Hyperliquid's Exchange class interface. +Uses direct HTTP calls with requests and X10 signing infrastructure. +""" + +import time +import warnings +from decimal import Decimal, ROUND_CEILING, ROUND_FLOOR +from typing import Any, Dict, List, Optional + +from extended.api.base_native_sync import BaseNativeSyncClient, ExtendedAPIError +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig +from extended.transformers_sync import ( + SyncOrderTransformer, + normalize_market_name, + to_hyperliquid_market_name, +) + +# Import X10 signing infrastructure (all sync!) +from x10.perpetual.accounts import StarkPerpetualAccount +from x10.perpetual.configuration import StarknetDomain +from x10.perpetual.markets import MarketModel +from x10.perpetual.order_object import create_order_object +from x10.perpetual.orders import OrderSide, TimeInForce + +# Constants +DEFAULT_SLIPPAGE = 0.05 +MARKET_ORDER_PRICE_CAP = 1.05 +MARKET_ORDER_PRICE_FLOOR = 0.95 + +# Time in force mapping (Hyperliquid -> X10) +TIF_MAPPING = { + "Gtc": TimeInForce.GTT, + "Ioc": TimeInForce.IOC, + "Alo": TimeInForce.GTT, # ALO uses GTT with post_only=True +} + + +class ExtendedValidationError(Exception): + """Validation error for Exchange API.""" + pass + + +def parse_order_type(order_type: Optional[Dict[str, Any]]) -> tuple: + """ + Parse Hyperliquid order_type to Extended params. + + Returns: + Tuple of (TimeInForce, post_only) + """ + if order_type is None: + return TimeInForce.GTT, False + + if "limit" in order_type: + tif = order_type["limit"].get("tif", "Gtc") + post_only = tif == "Alo" + return TIF_MAPPING.get(tif, TimeInForce.GTT), post_only + + return TimeInForce.GTT, False + + +def parse_builder(builder: Optional[Dict[str, Any]]) -> tuple: + """ + Parse Hyperliquid builder format to Extended params. 
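+
+    Example (illustrative values): {"b": 7, "f": 10} -> (7, Decimal("0.0001")),
+    since "f" is in tenths of a basis point: 10 / 100000 = 1 bp = 0.01%.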
+ + Returns: + Tuple of (builder_id, builder_fee) + """ + if builder is None: + return None, None + + builder_id = int(builder["b"]) + fee_tenths_bps = builder.get("f", 0) + builder_fee = Decimal(fee_tenths_bps) / Decimal(100000) + + return builder_id, builder_fee + + +class NativeSyncExchangeAPI(BaseNativeSyncClient): + """ + Extended Exchange Native Sync trading API with Hyperliquid-compatible interface. + + Uses requests for HTTP and X10 signing infrastructure for order signing. + All operations are synchronous. + + Example: + exchange = NativeSyncExchangeAPI(auth, config) + result = exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + exchange.cancel("BTC", oid=12345) + """ + + def __init__(self, auth: SimpleSyncAuth, config: SimpleSyncConfig): + """ + Initialize the native sync Exchange API. + + Args: + auth: SimpleSyncAuth instance with credentials + config: SimpleSyncConfig configuration + """ + super().__init__(auth, config) + + # Create StarkPerpetualAccount for signing (sync!) + self._stark_account = StarkPerpetualAccount( + vault=auth.vault, + private_key=auth.stark_private_key, + public_key=auth.stark_public_key, + api_key=auth.api_key, + ) + + # Create StarknetDomain from config + self._starknet_domain = StarknetDomain( + name="Perpetuals", + version="v0", + chain_id="SN_MAIN" if "sepolia" not in config.api_base_url else "SN_SEPOLIA", + revision="1", + ) + + # Cache for market models + self._markets_cache: Dict[str, MarketModel] = {} + + def _get_market(self, market_name: str) -> MarketModel: + """ + Get MarketModel for a market, with caching. + + Args: + market_name: Market name in Extended format (e.g., "BTC-USD") + + Returns: + MarketModel instance + """ + if market_name not in self._markets_cache: + # Fetch markets from API + response = self.get("/info/markets", authenticated=False) + markets_data = response.get("data", []) + + for market_data in markets_data: + try: + market = MarketModel.model_validate(market_data) + self._markets_cache[market.name] = market + except Exception: + # Skip markets that fail to parse + pass + + if market_name not in self._markets_cache: + raise ExtendedValidationError(f"Market {market_name} not found") + + return self._markets_cache[market_name] + + def order( + self, + name: str, + is_buy: bool, + sz: float, + limit_px: float, + order_type: Optional[Dict[str, Any]] = None, + reduce_only: bool = False, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Place a limit order - NATIVE SYNC with proper signing. + + Args: + name: Market name (e.g., "BTC" or "BTC-USD") + is_buy: True for buy, False for sell + sz: Size in base asset + limit_px: Limit price + order_type: {"limit": {"tif": "Gtc"}} or {"limit": {"tif": "Ioc"}} + or {"limit": {"tif": "Alo"}} (post-only) + reduce_only: Only reduce position + cloid: Client order ID (maps to external_id) + builder: {"b": builder_id, "f": fee_bps_tenths} + + Returns: + Hyperliquid-format response: + {"status": "ok", "response": {"type": "order", "data": {"statuses": [...]}}} + """ + market_name = normalize_market_name(name) + side = OrderSide.BUY if is_buy else OrderSide.SELL + tif, post_only = parse_order_type(order_type) + builder_id, builder_fee = parse_builder(builder) + + try: + # Get market model for order creation + market = self._get_market(market_name) + + # Create signed order using X10 infrastructure (all sync!) 
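+            # create_order_object (from x10.perpetual.order_object) builds and
+            # signs the payload locally with the StarkPerpetualAccount and the
+            # StarknetDomain configured in __init__; no network call happens
+            # until self.post() below.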
+ order = create_order_object( + account=self._stark_account, + market=market, + amount_of_synthetic=Decimal(str(sz)), + price=Decimal(str(limit_px)), + side=side, + starknet_domain=self._starknet_domain, + post_only=post_only, + time_in_force=tif, + order_external_id=cloid, + builder_fee=builder_fee, + builder_id=builder_id, + reduce_only=reduce_only, + ) + + # Send order via HTTP + order_data = order.to_api_request_json(exclude_none=True) + response = self.post("/user/order", data=order_data, authenticated=True) + + # Transform response + data = response.get("data", {}) + return { + "status": "ok", + "response": { + "type": "order", + "data": { + "statuses": [ + { + "resting": { + "oid": data.get("id", data.get("orderId", 0)), + "cloid": data.get("externalId", cloid), + } + } + ] + }, + }, + } + + except ExtendedAPIError as e: + return SyncOrderTransformer.transform_error_response(str(e.message)) + except Exception as e: + return SyncOrderTransformer.transform_error_response(str(e)) + + def bulk_orders( + self, + order_requests: List[Dict[str, Any]], + builder: Optional[Dict[str, Any]] = None, + grouping: str = "na", + ) -> Dict[str, Any]: + """ + Place multiple orders sequentially - NATIVE SYNC. + + WARNING: Unlike Hyperliquid, Extended does not support atomic + bulk orders. Orders are sent sequentially and may partially fail. + + Args: + order_requests: List of order dicts with keys: + - coin, is_buy, sz, limit_px, order_type, reduce_only, cloid + builder: Builder info applied to all orders + grouping: Ignored (no native support) + + Returns: + Combined results from all orders + """ + results = [] + + for request in order_requests: + try: + result = self.order( + name=request["coin"], + is_buy=request["is_buy"], + sz=request["sz"], + limit_px=request["limit_px"], + order_type=request.get("order_type", {"limit": {"tif": "Gtc"}}), + reduce_only=request.get("reduce_only", False), + cloid=request.get("cloid"), + builder=builder or request.get("builder"), + ) + + if result.get("status") == "ok": + statuses = result.get("response", {}).get("data", {}).get("statuses", []) + if statuses and "resting" in statuses[0]: + results.append({ + "status": "ok", + "data": { + "id": statuses[0]["resting"]["oid"], + "external_id": statuses[0]["resting"]["cloid"], + } + }) + else: + results.append({"status": "error", "error": result.get("response", "Unknown error")}) + else: + results.append({"status": "error", "error": result.get("response", "Unknown error")}) + + except Exception as e: + results.append({"status": "error", "error": str(e)}) + + return SyncOrderTransformer.transform_bulk_orders_response(results) + + def cancel( + self, + name: str, + oid: Optional[int] = None, + cloid: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Cancel an order by oid or cloid - NATIVE SYNC. 
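+
+        Example (sketch):
+            exchange.cancel("BTC", oid=12345)           # DELETE /user/order/12345
+            exchange.cancel("BTC", cloid="my-order-1")  # DELETE /user/order?externalId=...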
+ + Args: + name: Market name (required for Hyperliquid compat, may be ignored) + oid: Internal order ID + cloid: Client order ID (external_id) + + Returns: + Hyperliquid-format response: + {"status": "ok", "response": {"type": "cancel", "data": {"statuses": [...]}}} + """ + if oid is None and cloid is None: + raise ExtendedValidationError("Either oid or cloid must be provided") + + try: + if oid is not None: + # Endpoint: /user/order/ (DELETE) + response = self.delete(f"/user/order/{oid}", authenticated=True) + else: + # Endpoint: /user/order?externalId= (DELETE) + response = self.delete("/user/order", params={"externalId": cloid}, authenticated=True) + + return SyncOrderTransformer.transform_cancel_response(success=True, order_id=oid) + + except ExtendedAPIError as e: + return SyncOrderTransformer.transform_error_response(str(e.message)) + except Exception as e: + return SyncOrderTransformer.transform_error_response(str(e)) + + def cancel_by_cloid(self, name: str, cloid: str) -> Dict[str, Any]: + """ + Cancel order by client order ID - NATIVE SYNC. + + Args: + name: Market name + cloid: Client order ID + + Returns: + Cancel response in Hyperliquid format + """ + return self.cancel(name, cloid=cloid) + + def bulk_cancel( + self, + cancel_requests: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """ + Cancel multiple orders - NATIVE SYNC. + + Args: + cancel_requests: List of {"coin": str, "oid": int} + or {"coin": str, "cloid": str} + + Returns: + Combined cancel results + """ + oids = [] + cloids = [] + + for req in cancel_requests: + if "oid" in req and req["oid"] is not None: + oids.append(req["oid"]) + elif "cloid" in req and req["cloid"] is not None: + cloids.append(req["cloid"]) + + try: + cancel_data = {} + if oids: + cancel_data["orderIds"] = oids + if cloids: + cancel_data["externalOrderIds"] = cloids + + # Endpoint: /user/order/massCancel (POST) + response = self.post("/user/order/massCancel", data=cancel_data, authenticated=True) + + return { + "status": "ok", + "response": { + "type": "cancel", + "data": { + "statuses": ["success"] * len(cancel_requests) + }, + }, + } + + except ExtendedAPIError as e: + return SyncOrderTransformer.transform_error_response(str(e.message)) + except Exception as e: + return SyncOrderTransformer.transform_error_response(str(e)) + + def update_leverage( + self, + leverage: int, + name: str, + is_cross: bool = True, + ) -> Dict[str, Any]: + """ + Update leverage for a market - NATIVE SYNC. + + Args: + leverage: Target leverage (1-50) + name: Market name + is_cross: Ignored (Extended only supports cross margin) + + Returns: + Hyperliquid-format response: + {"status": "ok", "response": {"type": "leverage"}} + """ + if not is_cross: + warnings.warn( + "Extended Exchange only supports cross margin. " + "is_cross=False will be ignored.", + UserWarning, + ) + + market_name = normalize_market_name(name) + + try: + leverage_data = { + "market": market_name, + "leverage": str(leverage) + } + + # Endpoint: /user/leverage (PATCH) + response = self.patch("/user/leverage", data=leverage_data, authenticated=True) + return SyncOrderTransformer.transform_leverage_response() + + except ExtendedAPIError as e: + return SyncOrderTransformer.transform_error_response(str(e.message)) + except Exception as e: + return SyncOrderTransformer.transform_error_response(str(e)) + + def _calculate_market_order_price( + self, + name: str, + is_buy: bool, + slippage: float, + ) -> Decimal: + """ + Calculate limit price for simulated market order - NATIVE SYNC. 
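+
+        Tick rounding example (illustrative): with tick_size=0.5, a buy price
+        of 103.2 becomes (103.2 / 0.5) -> 206.4 -> ceil -> 207 -> * 0.5 = 103.5.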
+ + Extended constraints: + - Buy: price <= mark_price * 1.05 + - Sell: price >= mark_price * 0.95 + + Args: + name: Market name + is_buy: True for buy + slippage: Max slippage (e.g., 0.05 for 5%) + + Returns: + Calculated limit price (rounded to market precision) + """ + market_name = normalize_market_name(name) + + # Get market for tick size + market = self._get_market(market_name) + tick_size = market.trading_config.min_price_change + + # Get orderbook and stats + orderbook_response = self.get(f"/info/markets/{market_name}/orderbook", authenticated=False) + stats_response = self.get(f"/info/markets/{market_name}/stats", authenticated=False) + + orderbook = orderbook_response.get("data", {}) + stats = stats_response.get("data", {}) + mark_price = Decimal(str(stats.get("markPrice", stats.get("mark_price", "0")))) + + if is_buy: + asks = orderbook.get("ask", orderbook.get("asks", [])) + best_ask = Decimal(str(asks[0].get("price", mark_price))) if asks else mark_price + + target_price = best_ask * Decimal(1 + slippage) + max_price = mark_price * Decimal(str(MARKET_ORDER_PRICE_CAP)) + price = min(target_price, max_price) + + return (price / tick_size).quantize(Decimal('1'), rounding=ROUND_CEILING) * tick_size + else: + bids = orderbook.get("bid", orderbook.get("bids", [])) + best_bid = Decimal(str(bids[0].get("price", mark_price))) if bids else mark_price + + target_price = best_bid * Decimal(1 - slippage) + min_price = mark_price * Decimal(str(MARKET_ORDER_PRICE_FLOOR)) + price = max(target_price, min_price) + + return (price / tick_size).quantize(Decimal('1'), rounding=ROUND_FLOOR) * tick_size + + def market_open( + self, + name: str, + is_buy: bool, + sz: float, + px: Optional[float] = None, + slippage: float = DEFAULT_SLIPPAGE, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Open a position with a market order - NATIVE SYNC. + + Note: Extended simulates market orders as IOC limit orders. + + Args: + name: Market name + is_buy: True for long, False for short + sz: Size in base asset + px: Optional price hint (uses orderbook if not provided) + slippage: Max slippage (default 5%) + cloid: Client order ID + builder: Builder info + + Returns: + Order response in Hyperliquid format + """ + if px is not None: + limit_price = Decimal(str(px)) + else: + limit_price = self._calculate_market_order_price(name, is_buy, slippage) + + return self.order( + name=name, + is_buy=is_buy, + sz=sz, + limit_px=float(limit_price), + order_type={"limit": {"tif": "Ioc"}}, + reduce_only=False, + cloid=cloid, + builder=builder, + ) + + def market_close( + self, + coin: str, + sz: Optional[float] = None, + px: Optional[float] = None, + slippage: float = DEFAULT_SLIPPAGE, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Close a position with a market order - NATIVE SYNC. 
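+
+        Example (sketch):
+            exchange.market_close("BTC")            # close whole position
+            exchange.market_close("BTC", sz=0.005)  # partial, reduce-only IOC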
+ + Args: + coin: Market name + sz: Size to close (None = close entire position) + px: Optional price hint + slippage: Max slippage + cloid: Client order ID + builder: Builder info + + Returns: + Order response in Hyperliquid format + """ + market_name = normalize_market_name(coin) + + # Get current position + params = {"market": [market_name]} + positions_response = self.get("/user/positions", params=params, authenticated=True) + + positions = positions_response.get("data", []) + if not positions: + return SyncOrderTransformer.transform_error_response( + f"No open position found for {coin}" + ) + + position = positions[0] + + # Determine size to close + close_sz = float(sz) if sz is not None else abs(float(position.get("size", 0))) + + # Close is opposite side + side = position.get("side", "LONG") + is_buy = side == "SHORT" + + if px is not None: + limit_price = Decimal(str(px)) + else: + limit_price = self._calculate_market_order_price(coin, is_buy, slippage) + + return self.order( + name=coin, + is_buy=is_buy, + sz=close_sz, + limit_px=float(limit_price), + order_type={"limit": {"tif": "Ioc"}}, + reduce_only=True, + cloid=cloid, + builder=builder, + ) diff --git a/extended/api/exchange_new_sync.py b/extended/api/exchange_new_sync.py new file mode 100644 index 0000000..70bf6cd --- /dev/null +++ b/extended/api/exchange_new_sync.py @@ -0,0 +1,45 @@ +""" +Sync Exchange API for Extended Exchange SDK. + +Provides trading operations matching Hyperliquid's Exchange class interface. +Uses native sync implementation instead of async wrapper. +""" + +from typing import Any, Dict, List, Optional + +from x10.perpetual.configuration import EndpointConfig + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.api.exchange_native_sync import NativeSyncExchangeAPI +from extended.auth import ExtendedAuth + + +class ExchangeAPI(NativeSyncExchangeAPI): + """ + Extended Exchange trading API with Hyperliquid-compatible interface. + + Native synchronous implementation - NO async/await anywhere. + + Handles order placement, cancellation, and account management. + + Example: + exchange = ExchangeAPI(auth, config) + result = exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + exchange.cancel("BTC", oid=12345) + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the sync Exchange API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + # Use native sync implementation directly + super().__init__(auth, config) + + def close(self): + """Close the API and release resources.""" + # No async cleanup needed for native sync implementation + pass \ No newline at end of file diff --git a/extended/api/exchange_old_async_wrapper.py b/extended/api/exchange_old_async_wrapper.py new file mode 100644 index 0000000..65f145e --- /dev/null +++ b/extended/api/exchange_old_async_wrapper.py @@ -0,0 +1,260 @@ +""" +Sync Exchange API for Extended Exchange SDK. + +Provides trading operations matching Hyperliquid's Exchange class interface. +Wraps AsyncExchangeAPI to provide synchronous interface. 
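+
+Every public method here delegates to the async implementation through
+run_sync(), which is why this wrapper was replaced by the native sync
+client.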
+""" + +from typing import Any, Dict, List, Optional + +from x10.perpetual.configuration import EndpointConfig + +from extended.api.base import BaseSyncAPI +from extended.api.exchange_async import AsyncExchangeAPI +from extended.auth import ExtendedAuth +from extended.utils.constants import DEFAULT_SLIPPAGE +from extended.utils.helpers import run_sync + + +class ExchangeAPI(BaseSyncAPI): + """ + Extended Exchange trading API with Hyperliquid-compatible interface. + + Synchronous wrapper around AsyncExchangeAPI. + + Example: + exchange = ExchangeAPI(auth, config) + result = exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + exchange.cancel("BTC", oid=12345) + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the sync Exchange API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + super().__init__(auth, config) + self._async = AsyncExchangeAPI(auth, config) + + def order( + self, + name: str, + is_buy: bool, + sz: float, + limit_px: float, + order_type: Optional[Dict[str, Any]] = None, + reduce_only: bool = False, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Place a limit order. + + Args: + name: Market name (e.g., "BTC" or "BTC-USD") + is_buy: True for buy, False for sell + sz: Size in base asset + limit_px: Limit price + order_type: {"limit": {"tif": "Gtc"}} or {"limit": {"tif": "Ioc"}} + or {"limit": {"tif": "Alo"}} (post-only) + reduce_only: Only reduce position + cloid: Client order ID (maps to external_id) + builder: {"b": builder_id, "f": fee_bps_tenths} + + Returns: + Hyperliquid-format response + """ + return run_sync( + self._async.order( + name=name, + is_buy=is_buy, + sz=sz, + limit_px=limit_px, + order_type=order_type, + reduce_only=reduce_only, + cloid=cloid, + builder=builder, + ) + ) + + def bulk_orders( + self, + order_requests: List[Dict[str, Any]], + builder: Optional[Dict[str, Any]] = None, + grouping: str = "na", + ) -> Dict[str, Any]: + """ + Place multiple orders. + + WARNING: Unlike Hyperliquid, Extended does not support atomic + bulk orders. Orders are sent in parallel and may partially fail. + + Args: + order_requests: List of order dicts with keys: + - coin, is_buy, sz, limit_px, order_type, reduce_only, cloid + builder: Builder info applied to all orders + grouping: Ignored (no native support) + + Returns: + Combined results from all orders + """ + return run_sync( + self._async.bulk_orders( + order_requests=order_requests, + builder=builder, + grouping=grouping, + ) + ) + + def cancel( + self, + name: str, + oid: Optional[int] = None, + cloid: Optional[str] = None, + ) -> Dict[str, Any]: + """ + Cancel an order by oid or cloid. + + Args: + name: Market name (required for Hyperliquid compat, may be ignored) + oid: Internal order ID + cloid: Client order ID (external_id) + + Returns: + Cancel response in Hyperliquid format + """ + return run_sync(self._async.cancel(name=name, oid=oid, cloid=cloid)) + + def cancel_by_cloid(self, name: str, cloid: str) -> Dict[str, Any]: + """ + Cancel order by client order ID. + + Args: + name: Market name + cloid: Client order ID + + Returns: + Cancel response in Hyperliquid format + """ + return run_sync(self._async.cancel_by_cloid(name=name, cloid=cloid)) + + def bulk_cancel( + self, + cancel_requests: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """ + Cancel multiple orders. 
+ + Args: + cancel_requests: List of {"coin": str, "oid": int} + or {"coin": str, "cloid": str} + + Returns: + Combined cancel results + """ + return run_sync(self._async.bulk_cancel(cancel_requests=cancel_requests)) + + def update_leverage( + self, + leverage: int, + name: str, + is_cross: bool = True, + ) -> Dict[str, Any]: + """ + Update leverage for a market. + + Args: + leverage: Target leverage (1-50) + name: Market name + is_cross: Ignored (Extended only supports cross margin) + + Returns: + Leverage update response in Hyperliquid format + """ + return run_sync( + self._async.update_leverage( + leverage=leverage, + name=name, + is_cross=is_cross, + ) + ) + + def market_open( + self, + name: str, + is_buy: bool, + sz: float, + px: Optional[float] = None, + slippage: float = DEFAULT_SLIPPAGE, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Open a position with a market order. + + Note: Extended simulates market orders as IOC limit orders. + + Args: + name: Market name + is_buy: True for long, False for short + sz: Size in base asset + px: Optional price hint (uses orderbook if not provided) + slippage: Max slippage (default 5%) + cloid: Client order ID + builder: Builder info + + Returns: + Order response in Hyperliquid format + """ + return run_sync( + self._async.market_open( + name=name, + is_buy=is_buy, + sz=sz, + px=px, + slippage=slippage, + cloid=cloid, + builder=builder, + ) + ) + + def market_close( + self, + coin: str, + sz: Optional[float] = None, + px: Optional[float] = None, + slippage: float = DEFAULT_SLIPPAGE, + cloid: Optional[str] = None, + builder: Optional[Dict[str, Any]] = None, + ) -> Dict[str, Any]: + """ + Close a position with a market order. + + Args: + coin: Market name + sz: Size to close (None = close entire position) + px: Optional price hint + slippage: Max slippage + cloid: Client order ID + builder: Builder info + + Returns: + Order response in Hyperliquid format + """ + return run_sync( + self._async.market_close( + coin=coin, + sz=sz, + px=px, + slippage=slippage, + cloid=cloid, + builder=builder, + ) + ) + + def close(self): + """Close the API and release resources.""" + run_sync(self._async.close()) diff --git a/extended/api/info.py b/extended/api/info.py new file mode 100644 index 0000000..abf12fb --- /dev/null +++ b/extended/api/info.py @@ -0,0 +1,46 @@ +""" +Sync Info API for Extended Exchange SDK. + +Provides read-only operations matching Hyperliquid's Info class interface. +Uses native sync implementation instead of async wrapper. +""" + +from typing import Any, Dict, List, Optional + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.api.info_native_sync import NativeSyncInfoAPI +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig + + +class InfoAPI(NativeSyncInfoAPI): + """ + Extended Exchange Info API with Hyperliquid-compatible interface. + + Native synchronous implementation - pure sync. + + Note: Unlike Hyperliquid, Extended requires authentication for + user-specific data. The `address` parameter is accepted for + interface compatibility but ignored (uses authenticated user). + + Example: + info = InfoAPI(auth, config) + state = info.user_state() + orders = info.open_orders() + """ + + def __init__(self, auth: SimpleSyncAuth, config: SimpleSyncConfig): + """ + Initialize the sync Info API. 
+ + Args: + auth: SimpleSyncAuth instance with credentials + config: SimpleSyncConfig configuration + """ + # Use native sync implementation directly + super().__init__(auth, config) + + def close(self): + """Close the API and release resources.""" + # No async cleanup needed for native sync implementation + pass \ No newline at end of file diff --git a/extended/api/info_async.py b/extended/api/info_async.py new file mode 100644 index 0000000..3f2a95f --- /dev/null +++ b/extended/api/info_async.py @@ -0,0 +1,290 @@ +""" +Async Info API for Extended Exchange SDK. + +Provides read-only operations matching Hyperliquid's Info class interface. +""" + +import warnings +from datetime import datetime, timezone +from typing import Any, Dict, List, Optional + +from x10.perpetual.candles import CandleType +from x10.perpetual.configuration import EndpointConfig + +from extended.api.base_async import BaseAsyncAPI +from extended.auth import ExtendedAuth +from extended.transformers import AccountTransformer, MarketTransformer, OrderTransformer +from extended.utils.async_helpers import thread_safe_gather +from extended.utils.constants import INTERVAL_MAPPING, DEFAULT_CANDLE_TYPE +from extended.utils.helpers import normalize_market_name, to_hyperliquid_market_name + + +class AsyncInfoAPI(BaseAsyncAPI): + """ + Extended Exchange Info API with Hyperliquid-compatible interface. + + Note: Unlike Hyperliquid, Extended requires authentication for + user-specific data. The `address` parameter is accepted for + interface compatibility but ignored (uses authenticated user). + + Example: + async_info = AsyncInfoAPI(auth, config) + state = await async_info.user_state() + orders = await async_info.open_orders() + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the async Info API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + super().__init__(auth, config) + + async def user_state(self, address: Optional[str] = None) -> Dict[str, Any]: + """ + Get user's account state (balance + positions). + + Args: + address: Ignored (Extended requires auth, uses authenticated user) + + Returns: + Dict with Hyperliquid-compatible structure containing: + - assetPositions: List of position info + - crossMarginSummary: Account value and margin info + - marginSummary: Margin details + - withdrawable: Available for withdrawal + + Note: + Unlike Hyperliquid, Extended requires authentication. + The `address` parameter is accepted for interface compatibility + but is ignored. Data is always for the authenticated user. + """ + if address is not None and address != self._auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. " + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + # Fetch balance and positions in parallel + balance_task = self._client.account.get_balance() + positions_task = self._client.account.get_positions() + + balance_response, positions_response = await thread_safe_gather( + balance_task, positions_task + ) + + return AccountTransformer.transform_user_state( + balance_response.data, + positions_response.data or [], + ) + + async def open_orders(self, address: Optional[str] = None) -> List[Dict[str, Any]]: + """ + Get user's open orders. 
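+
+        Example (sketch):
+            orders = await async_info.open_orders()
+            for o in orders:
+                print(o["coin"], o["side"], o["limitPx"], o["sz"])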
+ + Args: + address: Ignored (uses authenticated user) + + Returns: + List of orders in Hyperliquid format with keys: + - coin: Market name (e.g., "BTC") + - side: "B" for buy, "A" for sell + - limitPx: Limit price + - sz: Remaining size + - oid: Order ID + - timestamp: Creation timestamp + - origSz: Original size + - cloid: Client order ID (external_id) + """ + if address is not None and address != self._auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. " + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + response = await self._client.account.get_open_orders() + return OrderTransformer.transform_open_orders(response.data or []) + + async def meta(self) -> Dict[str, Any]: + """ + Get exchange metadata (markets info). + + Returns: + Dict with Hyperliquid-compatible structure: + { + "universe": [ + { + "name": "BTC", + "szDecimals": 5, + "maxLeverage": 50, + "onlyIsolated": False, + } + ] + } + """ + response = await self._client.markets_info.get_markets() + return MarketTransformer.transform_meta(response.data or []) + + async def all_mids(self) -> Dict[str, str]: + """ + Get mid prices for all markets. + + Returns: + Dict mapping coin name to mid price string: + {"BTC": "50000.5", "ETH": "3000.25", ...} + """ + response = await self._client.markets_info.get_markets() + return MarketTransformer.transform_all_mids(response.data or []) + + async def l2_snapshot(self, name: str) -> Dict[str, Any]: + """ + Get order book snapshot. + + Args: + name: Market name (e.g., "BTC-USD" or "BTC") + + Returns: + Dict in Hyperliquid format: + { + "coin": "BTC", + "levels": [ + [{"px": "50000.0", "sz": "1.5", "n": 3}], # bids + [{"px": "50001.0", "sz": "2.0", "n": 5}], # asks + ], + "time": 1234567890000 + } + """ + market_name = normalize_market_name(name) + response = await self._client.markets_info.get_orderbook_snapshot( + market_name=market_name + ) + return MarketTransformer.transform_l2_snapshot(response.data) + + async def candles_snapshot( + self, + name: str, + interval: str, + startTime: int, + endTime: int, + candle_type: str = DEFAULT_CANDLE_TYPE, + ) -> List[Dict[str, Any]]: + """ + Get historical candles. 
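+
+        Example (sketch): fetch the last 24 hours of hourly candles.
+            import time
+            now_ms = int(time.time() * 1000)
+            candles = await async_info.candles_snapshot(
+                "BTC", "1h", now_ms - 24 * 3600 * 1000, now_ms
+            )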
+ + Args: + name: Market name (e.g., "BTC" or "BTC-USD") + interval: "1m", "5m", "15m", "30m", "1h", "2h", "4h", "1d" + startTime: Start timestamp (ms) + endTime: End timestamp (ms) + candle_type: Type of candle data (default "trades") + - "trades": Trade-based candles + - "mark-prices": Mark price candles + - "index-prices": Index price candles + + Returns: + List of candles in Hyperliquid format: + [{"t": ts, "T": close_ts, "s": symbol, "i": interval, + "o": open, "h": high, "l": low, "c": close, "v": vol, "n": 0}] + """ + market_name = normalize_market_name(name) + extended_interval = INTERVAL_MAPPING.get(interval, "PT1M") + + # Calculate limit based on time range and interval + # Extended API uses limit parameter, not startTime + end_dt = datetime.fromtimestamp(endTime / 1000, tz=timezone.utc) + + response = await self._client.markets_info.get_candles_history( + market_name=market_name, + candle_type=candle_type, # type: ignore + interval=extended_interval, # type: ignore + end_time=end_dt, + limit=1000, # Fetch max and filter by startTime + ) + + # Filter candles by startTime + candles = [c for c in (response.data or []) if c.timestamp >= startTime] + + coin = to_hyperliquid_market_name(name) + return MarketTransformer.transform_candles(candles, coin, interval) + + async def user_fills( + self, + coin: Optional[str] = None, + address: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """ + Get user's trade fills. + + Args: + coin: Market name (optional - if None, returns fills for all markets) + address: Ignored (uses authenticated user) + start_time: Optional start timestamp (ms) + end_time: Optional end timestamp (ms) + + Returns: + List of fills in Hyperliquid format (up to 1000 most recent) + + Note: + The `cloid` field will be null in responses (not available from trades endpoint). + """ + if address is not None and address != self._auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. " + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + market_names = [normalize_market_name(coin)] if coin else None + response = await self._client.account.get_trades( + market_names=market_names, + ) + + # Filter by time if provided + trades = response.data or [] + if start_time is not None: + trades = [t for t in trades if t.created_time >= start_time] + if end_time is not None: + trades = [t for t in trades if t.created_time <= end_time] + + return OrderTransformer.transform_user_fills(trades) + + async def get_position_leverage( + self, + symbol: str, + address: Optional[str] = None, + ) -> Optional[int]: + """ + Get current leverage for a position. + + Args: + symbol: Market name (e.g., "BTC" or "BTC-USD") + address: Ignored (uses authenticated user) + + Returns: + Current leverage as integer, or None if not found + """ + if address is not None and address != self._auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. 
" + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + market_name = normalize_market_name(symbol) + response = await self._client.account.get_leverage( + market_names=[market_name] + ) + + if response.data: + for lev in response.data: + if lev.market == market_name: + return int(lev.leverage) + + return None diff --git a/extended/api/info_native_sync.py b/extended/api/info_native_sync.py new file mode 100644 index 0000000..eabb908 --- /dev/null +++ b/extended/api/info_native_sync.py @@ -0,0 +1,273 @@ +""" +Native Sync Info API for Extended Exchange SDK. + +Provides read-only operations matching Hyperliquid's Info class interface. +Uses direct HTTP calls with requests - no async dependencies. +""" + +import warnings +from datetime import datetime, timezone +from typing import Any, Dict, List, Optional + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig +from extended.transformers_sync import ( + SyncAccountTransformer, + SyncMarketTransformer, + SyncOrderTransformer, + normalize_market_name, + to_hyperliquid_market_name, +) + +# Interval mapping (Hyperliquid -> Extended) +INTERVAL_MAPPING = { + "1m": "PT1M", + "5m": "PT5M", + "15m": "PT15M", + "30m": "PT30M", + "1h": "PT1H", + "2h": "PT2H", + "4h": "PT4H", + "1d": "P1D", +} + +DEFAULT_CANDLE_TYPE = "trades" + + +class NativeSyncInfoAPI(BaseNativeSyncClient): + """ + Extended Exchange Native Sync Info API with Hyperliquid-compatible interface. + + Uses requests directly for pure synchronous operation. + + Note: Unlike Hyperliquid, Extended requires authentication for + user-specific data. The `address` parameter is accepted for + interface compatibility but ignored (uses authenticated user). + + Example: + info = NativeSyncInfoAPI(auth, config) + state = info.user_state() + orders = info.open_orders() + """ + + def __init__(self, auth: SimpleSyncAuth, config: SimpleSyncConfig): + """ + Initialize the native sync Info API. + + Args: + auth: SimpleSyncAuth instance with credentials + config: SimpleSyncConfig configuration + """ + super().__init__(auth, config) + + def user_state(self, address: Optional[str] = None) -> Dict[str, Any]: + """ + Get user's account state (balance + positions) - NATIVE SYNC. + + Args: + address: Ignored (Extended requires auth, uses authenticated user) + + Returns: + Dict with Hyperliquid-compatible structure containing: + - assetPositions: List of position info + - crossMarginSummary: Account value and margin info + - marginSummary: Margin details + - withdrawable: Available for withdrawal + """ + if address is not None and address != self.auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. " + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + # Endpoints from x10/perpetual/trading_client/account_module.py + balance_response = self.get("/user/balance", authenticated=True) + positions_response = self.get("/user/positions", authenticated=True) + + return SyncAccountTransformer.transform_user_state( + balance_response.get("data", {}), + positions_response.get("data", []) or [], + ) + + def open_orders(self, address: Optional[str] = None) -> List[Dict[str, Any]]: + """ + Get user's open orders - NATIVE SYNC. 
+ + Args: + address: Ignored (uses authenticated user) + + Returns: + List of orders in Hyperliquid format + """ + if address is not None and address != self.auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. " + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + # Endpoint: /user/orders + response = self.get("/user/orders", authenticated=True) + return SyncOrderTransformer.transform_open_orders(response.get("data", []) or []) + + def meta(self) -> Dict[str, Any]: + """ + Get exchange metadata (markets info) - NATIVE SYNC. + + Returns: + Dict with Hyperliquid-compatible structure with universe list + """ + # Endpoint: /info/markets + response = self.get("/info/markets", authenticated=False) + return SyncMarketTransformer.transform_meta(response.get("data", []) or []) + + def all_mids(self) -> Dict[str, str]: + """ + Get mid prices for all markets - NATIVE SYNC. + + Returns: + Dict mapping coin name to mid price string + """ + # Endpoint: /info/markets (includes stats) + response = self.get("/info/markets", authenticated=False) + return SyncMarketTransformer.transform_all_mids(response.get("data", []) or []) + + def l2_snapshot(self, name: str) -> Dict[str, Any]: + """ + Get order book snapshot - NATIVE SYNC. + + Args: + name: Market name (e.g., "BTC-USD" or "BTC") + + Returns: + Dict in Hyperliquid format with coin, levels, and time + """ + market_name = normalize_market_name(name) + # Endpoint: /info/markets//orderbook + response = self.get(f"/info/markets/{market_name}/orderbook", authenticated=False) + return SyncMarketTransformer.transform_l2_snapshot(response.get("data", {})) + + def candles_snapshot( + self, + name: str, + interval: str, + startTime: int, + endTime: int, + candle_type: str = DEFAULT_CANDLE_TYPE, + ) -> List[Dict[str, Any]]: + """ + Get historical candles - NATIVE SYNC. + + Args: + name: Market name (e.g., "BTC" or "BTC-USD") + interval: "1m", "5m", "15m", "30m", "1h", "2h", "4h", "1d" + startTime: Start timestamp (ms) + endTime: End timestamp (ms) + candle_type: Type of candle data (default "trades") + + Returns: + List of candles in Hyperliquid format + """ + market_name = normalize_market_name(name) + extended_interval = INTERVAL_MAPPING.get(interval, "PT1M") + + # Convert endTime to milliseconds for API + params = { + "interval": extended_interval, + "endTime": endTime, + "limit": 1000, + } + + # Endpoint: /info/candles// + response = self.get( + f"/info/candles/{market_name}/{candle_type}", + params=params, + authenticated=False + ) + + # Filter candles by startTime + candles = response.get("data", []) or [] + filtered_candles = [c for c in candles if c.get("timestamp", 0) >= startTime] + + coin = to_hyperliquid_market_name(name) + return SyncMarketTransformer.transform_candles(filtered_candles, coin, interval) + + def user_fills( + self, + coin: Optional[str] = None, + address: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """ + Get user's trade fills - NATIVE SYNC. + + Args: + coin: Market name (optional - if None, returns fills for all markets) + address: Ignored (uses authenticated user) + start_time: Optional start timestamp (ms) + end_time: Optional end timestamp (ms) + + Returns: + List of fills in Hyperliquid format (up to 1000 most recent) + """ + if address is not None and address != self.auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. 
" + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + params = {} + if coin: + params["market"] = normalize_market_name(coin) + + # Endpoint: /user/trades + response = self.get("/user/trades", params=params, authenticated=True) + trades = response.get("data", []) or [] + + # Filter by time if provided + if start_time is not None: + trades = [t for t in trades if t.get("createdTime", t.get("created_time", 0)) >= start_time] + if end_time is not None: + trades = [t for t in trades if t.get("createdTime", t.get("created_time", 0)) <= end_time] + + return SyncOrderTransformer.transform_user_fills(trades) + + def get_position_leverage( + self, + symbol: str, + address: Optional[str] = None, + ) -> Optional[int]: + """ + Get current leverage for a position - NATIVE SYNC. + + Args: + symbol: Market name (e.g., "BTC" or "BTC-USD") + address: Ignored (uses authenticated user) + + Returns: + Current leverage as integer, or None if not found + """ + if address is not None and address != self.auth.address: + warnings.warn( + "Extended Exchange does not support querying other users. " + f"Ignoring address={address}, using authenticated user.", + UserWarning, + ) + + market_name = normalize_market_name(symbol) + params = {"market": [market_name]} + + # Endpoint: /user/leverage + response = self.get("/user/leverage", params=params, authenticated=True) + + leverage_data = response.get("data", []) or [] + if leverage_data: + for lev in leverage_data: + if lev.get("market") == market_name: + return int(lev.get("leverage", 0)) + + return None diff --git a/extended/api/info_new_sync.py b/extended/api/info_new_sync.py new file mode 100644 index 0000000..38998f8 --- /dev/null +++ b/extended/api/info_new_sync.py @@ -0,0 +1,47 @@ +""" +Sync Info API for Extended Exchange SDK. + +Provides read-only operations matching Hyperliquid's Info class interface. +Uses native sync implementation instead of async wrapper. +""" + +from typing import Any, Dict, List, Optional + +from x10.perpetual.configuration import EndpointConfig + +from extended.api.base_native_sync import BaseNativeSyncClient +from extended.api.info_native_sync import NativeSyncInfoAPI +from extended.auth import ExtendedAuth + + +class InfoAPI(NativeSyncInfoAPI): + """ + Extended Exchange Info API with Hyperliquid-compatible interface. + + Native synchronous implementation - NO async/await anywhere. + + Note: Unlike Hyperliquid, Extended requires authentication for + user-specific data. The `address` parameter is accepted for + interface compatibility but ignored (uses authenticated user). + + Example: + info = InfoAPI(auth, config) + state = info.user_state() + orders = info.open_orders() + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the sync Info API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + # Use native sync implementation directly + super().__init__(auth, config) + + def close(self): + """Close the API and release resources.""" + # No async cleanup needed for native sync implementation + pass \ No newline at end of file diff --git a/extended/api/info_old_async_wrapper.py b/extended/api/info_old_async_wrapper.py new file mode 100644 index 0000000..4b255ad --- /dev/null +++ b/extended/api/info_old_async_wrapper.py @@ -0,0 +1,171 @@ +""" +Sync Info API for Extended Exchange SDK. + +Provides read-only operations matching Hyperliquid's Info class interface. +Wraps AsyncInfoAPI to provide synchronous interface. 
+""" + +from typing import Any, Dict, List, Optional + +from x10.perpetual.configuration import EndpointConfig + +from extended.api.base import BaseSyncAPI +from extended.api.info_async import AsyncInfoAPI +from extended.auth import ExtendedAuth +from extended.utils.helpers import run_sync + + +class InfoAPI(BaseSyncAPI): + """ + Extended Exchange Info API with Hyperliquid-compatible interface. + + Synchronous wrapper around AsyncInfoAPI. + + Note: Unlike Hyperliquid, Extended requires authentication for + user-specific data. The `address` parameter is accepted for + interface compatibility but ignored (uses authenticated user). + + Example: + info = InfoAPI(auth, config) + state = info.user_state() + orders = info.open_orders() + """ + + def __init__(self, auth: ExtendedAuth, config: EndpointConfig): + """ + Initialize the sync Info API. + + Args: + auth: ExtendedAuth instance with credentials + config: Endpoint configuration + """ + super().__init__(auth, config) + self._async = AsyncInfoAPI(auth, config) + + def user_state(self, address: Optional[str] = None) -> Dict[str, Any]: + """ + Get user's account state (balance + positions). + + Args: + address: Ignored (Extended requires auth, uses authenticated user) + + Returns: + Dict with Hyperliquid-compatible structure containing: + - assetPositions: List of position info + - crossMarginSummary: Account value and margin info + - marginSummary: Margin details + - withdrawable: Available for withdrawal + """ + return run_sync(lambda: self._async.user_state(address)) + + def open_orders(self, address: Optional[str] = None) -> List[Dict[str, Any]]: + """ + Get user's open orders. + + Args: + address: Ignored (uses authenticated user) + + Returns: + List of orders in Hyperliquid format + """ + return run_sync(lambda: self._async.open_orders(address)) + + def meta(self) -> Dict[str, Any]: + """ + Get exchange metadata (markets info). + + Returns: + Dict with Hyperliquid-compatible structure with universe list + """ + return run_sync(lambda: self._async.meta()) + + def all_mids(self) -> Dict[str, str]: + """ + Get mid prices for all markets. + + Returns: + Dict mapping coin name to mid price string + """ + return run_sync(lambda: self._async.all_mids()) + + def l2_snapshot(self, name: str) -> Dict[str, Any]: + """ + Get order book snapshot. + + Args: + name: Market name (e.g., "BTC-USD" or "BTC") + + Returns: + Dict in Hyperliquid format with coin, levels, and time + """ + return run_sync(lambda: self._async.l2_snapshot(name)) + + def candles_snapshot( + self, + name: str, + interval: str, + startTime: int, + endTime: int, + candle_type: str = "trades", + ) -> List[Dict[str, Any]]: + """ + Get historical candles. + + Args: + name: Market name (e.g., "BTC" or "BTC-USD") + interval: "1m", "5m", "15m", "30m", "1h", "2h", "4h", "1d" + startTime: Start timestamp (ms) + endTime: End timestamp (ms) + candle_type: Type of candle data (default "trades") + + Returns: + List of candles in Hyperliquid format + """ + return run_sync( + lambda: self._async.candles_snapshot(name, interval, startTime, endTime, candle_type) + ) + + def user_fills( + self, + coin: Optional[str] = None, + address: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """ + Get user's trade fills. 
+ + Args: + coin: Market name (optional - if None, returns fills for all markets) + address: Ignored (uses authenticated user) + start_time: Optional start timestamp (ms) + end_time: Optional end timestamp (ms) + + Returns: + List of fills in Hyperliquid format (up to 1000 most recent) + + Note: + The `cloid` field will be null in responses. + """ + return run_sync(lambda: self._async.user_fills(coin, address, start_time, end_time)) + + def get_position_leverage( + self, + symbol: str, + address: Optional[str] = None, + ) -> Optional[int]: + """ + Get current leverage for a position. + + Args: + symbol: Market name (e.g., "BTC" or "BTC-USD") + address: Ignored (uses authenticated user) + + Returns: + Current leverage as integer, or None if not found + """ + return run_sync(lambda: self._async.get_position_leverage(symbol, address)) + + def close(self): + """Close the API and release resources.""" + run_sync(lambda: self._async.close()) diff --git a/extended/async_client.py b/extended/async_client.py new file mode 100644 index 0000000..fc2cbb2 --- /dev/null +++ b/extended/async_client.py @@ -0,0 +1,107 @@ +""" +Async Client for Extended Exchange SDK. + +Async version of the main entry point matching Hyperliquid/Pacifica Client class. +""" + +from typing import Optional + +from extended.api.exchange_async import AsyncExchangeAPI +from extended.api.info_async import AsyncInfoAPI +from extended.auth import ExtendedAuth +from extended.config import MAINNET_CONFIG, TESTNET_CONFIG + + +class AsyncClient: + """ + Async Extended Exchange client with Hyperliquid-compatible interface. + + Provides asynchronous access to Info and Exchange APIs. + + Usage: + client = AsyncClient( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True, + ) + + # Info operations + state = await client.info.user_state() + orders = await client.info.open_orders() + + # Exchange operations + result = await client.exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + await client.exchange.cancel("BTC", oid=12345) + + # Clean up + await client.close() + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. + """ + + def __init__( + self, + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, + timeout: int = 30, + ): + """ + Initialize async Extended client. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet (default False) + base_url: Custom API base URL (optional, not currently used) + timeout: Request timeout in seconds (default 30, not currently used) + + Note: + Credentials must be obtained from your onboarding infrastructure. + This SDK does not perform onboarding. 
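+
+        Example (a sketch; must run inside a coroutine, values are
+        placeholders):
+
+            client = AsyncClient(api_key="key", vault=1, stark_private_key="0x1",
+                                 stark_public_key="0x2", testnet=True)
+            try:
+                mids = await client.info.all_mids()
+            finally:
+                await client.close()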
+ """ + self._auth = ExtendedAuth( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + ) + self._config = self._auth.get_config() + self._timeout = timeout + + self._info = AsyncInfoAPI(self._auth, self._config) + self._exchange = AsyncExchangeAPI(self._auth, self._config) + + @property + def info(self) -> AsyncInfoAPI: + """Access async Info API for read operations.""" + return self._info + + @property + def exchange(self) -> AsyncExchangeAPI: + """Access async Exchange API for trading operations.""" + return self._exchange + + @property + def address(self) -> Optional[str]: + """Get the authenticated user's address (stark public key).""" + return self._auth.address + + @property + def public_key(self) -> Optional[str]: + """Get the L2 public key.""" + return self._auth.stark_public_key + + async def close(self): + """Close the client and release resources.""" + await self._auth.close() diff --git a/extended/auth.py b/extended/auth.py new file mode 100644 index 0000000..ce1ba1d --- /dev/null +++ b/extended/auth.py @@ -0,0 +1,122 @@ +""" +Authentication module for Extended Exchange SDK. + +Manages pre-configured Extended credentials for use with the SDK. + +Note: Onboarding (L1->L2 key derivation, account creation, API key generation) +is handled by a separate infrastructure component. This SDK assumes credentials +are already available. +""" + +from typing import Optional + +from x10.perpetual.accounts import StarkPerpetualAccount +from x10.perpetual.configuration import EndpointConfig, TESTNET_CONFIG, MAINNET_CONFIG +from x10.perpetual.trading_client import PerpetualTradingClient + + +class ExtendedAuth: + """ + Manages Extended Exchange authentication credentials. + + Accepts pre-configured credentials from your onboarding infrastructure: + - api_key: Extended Exchange API key + - vault: Account/vault ID + - stark_private_key: L2 Stark private key (hex string) + - stark_public_key: L2 Stark public key (hex string) + + Example: + auth = ExtendedAuth( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True, + ) + + # Get the configured trading client + client = auth.get_trading_client() + """ + + def __init__( + self, + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + ): + """ + Initialize authentication with pre-configured credentials. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string, e.g., "0x...") + stark_public_key: L2 Stark public key (hex string, e.g., "0x...") + testnet: Use testnet configuration (default False) + """ + self.api_key = api_key + self.vault = vault + self.stark_private_key = stark_private_key + self.stark_public_key = stark_public_key + self.testnet = testnet + self._stark_account: Optional[StarkPerpetualAccount] = None + self._config: EndpointConfig = TESTNET_CONFIG if testnet else MAINNET_CONFIG + self._trading_client: Optional[PerpetualTradingClient] = None + + @property + def address(self) -> str: + """ + Return the stark public key as the user's address/identifier. + + This is used for Hyperliquid interface compatibility. + """ + return self.stark_public_key + + def get_stark_account(self) -> StarkPerpetualAccount: + """ + Get or create StarkPerpetualAccount from credentials. 
+ + Returns: + Configured StarkPerpetualAccount instance + """ + if self._stark_account is None: + self._stark_account = StarkPerpetualAccount( + vault=self.vault, + private_key=self.stark_private_key, + public_key=self.stark_public_key, + api_key=self.api_key, + ) + return self._stark_account + + def get_config(self) -> EndpointConfig: + """ + Return endpoint configuration. + + Returns: + TESTNET_CONFIG or MAINNET_CONFIG based on testnet flag + """ + return self._config + + def get_trading_client(self) -> PerpetualTradingClient: + """ + Create or return a configured PerpetualTradingClient. + + Returns: + Configured PerpetualTradingClient instance + """ + if self._trading_client is None: + self._trading_client = PerpetualTradingClient( + endpoint_config=self._config, + stark_account=self.get_stark_account(), + ) + return self._trading_client + + async def close(self): + """ + Close the trading client and release resources. + """ + if self._trading_client is not None: + await self._trading_client.close() + self._trading_client = None diff --git a/extended/auth_sync.py b/extended/auth_sync.py new file mode 100644 index 0000000..700dfae --- /dev/null +++ b/extended/auth_sync.py @@ -0,0 +1,41 @@ +""" +Simplified sync auth interface for Extended Exchange SDK. + +Provides the minimal auth interface needed by the native sync implementation +without async dependencies. +""" + +from typing import Optional +from dataclasses import dataclass + + +@dataclass +class SimpleSyncAuth: + """ + Simplified synchronous auth interface. + + Contains only the essential fields needed by the native sync HTTP client. + No async dependencies or complex crypto operations. + """ + + api_key: str + vault: int + stark_private_key: str + stark_public_key: str + testnet: bool = False + + @property + def address(self) -> str: + """Get the account address (stark public key).""" + return self.stark_public_key + + def get_auth_headers(self) -> dict: + """Get authentication headers for HTTP requests.""" + return { + "X-Api-Key": self.api_key, + "Content-Type": "application/json" + } + + +# For compatibility with existing code that expects ExtendedAuth +ExtendedAuth = SimpleSyncAuth \ No newline at end of file diff --git a/extended/client.py b/extended/client.py new file mode 100644 index 0000000..e30a410 --- /dev/null +++ b/extended/client.py @@ -0,0 +1,115 @@ +""" +Native Sync Client for Extended Exchange SDK. + +Main entry point matching Hyperliquid/Pacifica Client class. +Uses native sync implementation instead of wrapper approach. +""" + +from typing import Optional + +from extended.api.exchange import ExchangeAPI +from extended.api.info import InfoAPI +from extended.auth_sync import SimpleSyncAuth +from extended.config_sync import SimpleSyncConfig, MAINNET_CONFIG, TESTNET_CONFIG + + +class Client: + """ + Extended Exchange client with Hyperliquid-compatible interface. + + Provides synchronous access to Info and Exchange APIs using NATIVE SYNC implementation. + Pure synchronous operation throughout. + + Usage: + client = Client( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True, + ) + + # Info operations - NATIVE SYNC + state = client.info.user_state() + orders = client.info.open_orders() + + # Exchange operations - NATIVE SYNC + client.exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + client.exchange.cancel("BTC", oid=12345) + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. 
This SDK does not perform onboarding. + """ + + def __init__( + self, + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, + timeout: int = 30, + ): + """ + Initialize Extended client with NATIVE SYNC implementation. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet (default False) + base_url: Custom API base URL (optional) + timeout: Request timeout in seconds + + Note: + Credentials must be obtained from your onboarding infrastructure. + This SDK does not perform onboarding. + """ + self._auth = SimpleSyncAuth( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + ) + + # Use appropriate config + self._config = TESTNET_CONFIG if testnet else MAINNET_CONFIG + if base_url: + # Create custom config with provided base_url + self._config = SimpleSyncConfig(api_base_url=base_url) + + self._timeout = timeout + + # Create NATIVE SYNC APIs - pure sync implementation + self._info = InfoAPI(self._auth, self._config) + self._exchange = ExchangeAPI(self._auth, self._config) + + @property + def info(self) -> InfoAPI: + """Access Info API for read operations - NATIVE SYNC.""" + return self._info + + @property + def exchange(self) -> ExchangeAPI: + """Access Exchange API for trading operations - NATIVE SYNC.""" + return self._exchange + + @property + def address(self) -> Optional[str]: + """Get the authenticated user's address (stark public key).""" + return self._auth.address + + @property + def public_key(self) -> Optional[str]: + """Get the L2 public key.""" + return self._auth.stark_public_key + + def close(self): + """Close the client and release resources - NATIVE SYNC.""" + self._info.close() + self._exchange.close() + # No async cleanup needed for native sync implementation \ No newline at end of file diff --git a/extended/client_new_sync.py b/extended/client_new_sync.py new file mode 100644 index 0000000..03ed144 --- /dev/null +++ b/extended/client_new_sync.py @@ -0,0 +1,116 @@ +""" +Native Sync Client for Extended Exchange SDK. + +Main entry point matching Hyperliquid/Pacifica Client class. +REPLACES problematic run_sync() approach with native sync implementation. +""" + +from typing import Optional + +from extended.api.exchange_new_sync import ExchangeAPI +from extended.api.info_new_sync import InfoAPI +from extended.auth import ExtendedAuth +from extended.config import MAINNET_CONFIG, TESTNET_CONFIG + + +class Client: + """ + Extended Exchange client with Hyperliquid-compatible interface. + + Provides synchronous access to Info and Exchange APIs using NATIVE SYNC implementation. + NO async/await or run_sync() anywhere - pure synchronous operation. + + Usage: + client = Client( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True, + ) + + # Info operations - NATIVE SYNC + state = client.info.user_state() + orders = client.info.open_orders() + + # Exchange operations - NATIVE SYNC + client.exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + client.exchange.cancel("BTC", oid=12345) + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. 
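+
+    Cleanup sketch (close() releases both API sessions; values are
+    placeholders):
+
+        try:
+            mid = client.info.all_mids().get("BTC")
+        finally:
+            client.close()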
+ """ + + def __init__( + self, + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, + timeout: int = 30, + ): + """ + Initialize Extended client with NATIVE SYNC implementation. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet (default False) + base_url: Custom API base URL (optional) + timeout: Request timeout in seconds + + Note: + Credentials must be obtained from your onboarding infrastructure. + This SDK does not perform onboarding. + """ + self._auth = ExtendedAuth( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + ) + + # Use appropriate config + self._config = TESTNET_CONFIG if testnet else MAINNET_CONFIG + if base_url: + # Create custom config with provided base_url + from x10.perpetual.configuration import EndpointConfig + self._config = EndpointConfig(api_base_url=base_url) + + self._timeout = timeout + + # Create NATIVE SYNC APIs - NO run_sync() wrapper + self._info = InfoAPI(self._auth, self._config) + self._exchange = ExchangeAPI(self._auth, self._config) + + @property + def info(self) -> InfoAPI: + """Access Info API for read operations - NATIVE SYNC.""" + return self._info + + @property + def exchange(self) -> ExchangeAPI: + """Access Exchange API for trading operations - NATIVE SYNC.""" + return self._exchange + + @property + def address(self) -> Optional[str]: + """Get the authenticated user's address (stark public key).""" + return self._auth.address + + @property + def public_key(self) -> Optional[str]: + """Get the L2 public key.""" + return self._auth.stark_public_key + + def close(self): + """Close the client and release resources - NATIVE SYNC.""" + self._info.close() + self._exchange.close() + # No async cleanup needed for native sync implementation \ No newline at end of file diff --git a/extended/client_old_async_wrapper.py b/extended/client_old_async_wrapper.py new file mode 100644 index 0000000..12e0a02 --- /dev/null +++ b/extended/client_old_async_wrapper.py @@ -0,0 +1,106 @@ +""" +Sync Client for Extended Exchange SDK. + +Main entry point matching Hyperliquid/Pacifica Client class. +""" + +from typing import Optional + +from extended.api.exchange import ExchangeAPI +from extended.api.info import InfoAPI +from extended.auth import ExtendedAuth +from extended.config import MAINNET_CONFIG, TESTNET_CONFIG +from extended.utils.helpers import run_sync + + +class Client: + """ + Extended Exchange client with Hyperliquid-compatible interface. + + Provides synchronous access to Info and Exchange APIs. + + Usage: + client = Client( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True, + ) + + # Info operations + state = client.info.user_state() + orders = client.info.open_orders() + + # Exchange operations + client.exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + client.exchange.cancel("BTC", oid=12345) + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. + """ + + def __init__( + self, + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, + timeout: int = 30, + ): + """ + Initialize Extended client. 
+ + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet (default False) + base_url: Custom API base URL (optional, not currently used) + timeout: Request timeout in seconds (default 30, not currently used) + + Note: + Credentials must be obtained from your onboarding infrastructure. + This SDK does not perform onboarding. + """ + self._auth = ExtendedAuth( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + ) + self._config = self._auth.get_config() + self._timeout = timeout + + self._info = InfoAPI(self._auth, self._config) + self._exchange = ExchangeAPI(self._auth, self._config) + + @property + def info(self) -> InfoAPI: + """Access Info API for read operations.""" + return self._info + + @property + def exchange(self) -> ExchangeAPI: + """Access Exchange API for trading operations.""" + return self._exchange + + @property + def address(self) -> Optional[str]: + """Get the authenticated user's address (stark public key).""" + return self._auth.address + + @property + def public_key(self) -> Optional[str]: + """Get the L2 public key.""" + return self._auth.stark_public_key + + def close(self): + """Close the client and release resources.""" + self._info.close() + # Exchange uses the same auth, so closing info is sufficient diff --git a/extended/config.py b/extended/config.py new file mode 100644 index 0000000..f23add7 --- /dev/null +++ b/extended/config.py @@ -0,0 +1,19 @@ +""" +Configuration for Extended Exchange SDK. + +Re-exports the endpoint configurations from x10 for convenience. +""" + +from x10.perpetual.configuration import ( + EndpointConfig, + MAINNET_CONFIG, + TESTNET_CONFIG, + StarknetDomain, +) + +__all__ = [ + "EndpointConfig", + "MAINNET_CONFIG", + "TESTNET_CONFIG", + "StarknetDomain", +] diff --git a/extended/config_sync.py b/extended/config_sync.py new file mode 100644 index 0000000..8d5d84d --- /dev/null +++ b/extended/config_sync.py @@ -0,0 +1,48 @@ +""" +Simplified sync configuration for Extended Exchange SDK. + +Provides the minimal config interface needed by the native sync implementation +without X10 dependencies. +""" + +from typing import Optional +from dataclasses import dataclass + + +@dataclass +class SimpleSyncConfig: + """ + Simplified synchronous config interface. + + Contains only the essential fields needed by the native sync HTTP client. + No X10 or async dependencies. 
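+
+    Construction sketch (the testnet URL is the default assigned in
+    __init__; the custom URL is a placeholder):
+
+        cfg = SimpleSyncConfig(testnet=True)
+        # cfg.api_base_url == "https://api.starknet.sepolia.extended.exchange/api/v1"
+        custom = SimpleSyncConfig(api_base_url="https://example.invalid/api/v1")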
+ """ + + api_base_url: str + signing_domain: str = "" + timeout: int = 30 + + def __init__(self, api_base_url: str = None, signing_domain: str = "", testnet: bool = False, timeout: int = 30): + if api_base_url: + self.api_base_url = api_base_url + elif testnet: + self.api_base_url = "https://api.starknet.sepolia.extended.exchange/api/v1" + else: + self.api_base_url = "https://api.starknet.extended.exchange/api/v1" + + if signing_domain: + self.signing_domain = signing_domain + elif testnet: + self.signing_domain = "starknet.sepolia.extended.exchange" + else: + self.signing_domain = "extended.exchange" + + self.timeout = timeout + + +# Default configurations +MAINNET_CONFIG = SimpleSyncConfig(testnet=False) +TESTNET_CONFIG = SimpleSyncConfig(testnet=True) + +# For compatibility with existing code +EndpointConfig = SimpleSyncConfig \ No newline at end of file diff --git a/extended/exceptions.py b/extended/exceptions.py new file mode 100644 index 0000000..2ab9d49 --- /dev/null +++ b/extended/exceptions.py @@ -0,0 +1,76 @@ +""" +Custom exceptions for Extended Exchange SDK. + +Follows the Hyperliquid/Pacifica error handling pattern. +""" + +from typing import Any, Dict, Optional + + +class ExtendedError(Exception): + """Base exception for Extended SDK.""" + + pass + + +class ExtendedAPIError(ExtendedError): + """ + API error with status code and message. + + Raised when the Extended API returns an error response. + """ + + def __init__( + self, + status_code: int, + message: str, + response: Optional[Dict[str, Any]] = None, + ): + self.status_code = status_code + self.message = message + self.response = response + super().__init__(f"[{status_code}] {message}") + + +class ExtendedAuthError(ExtendedError): + """ + Authentication error. + + Raised when authentication fails (invalid API key, invalid signature, etc.). + """ + + pass + + +class ExtendedRateLimitError(ExtendedAPIError): + """ + Rate limit exceeded (HTTP 429). + + Raised when the API rate limit is exceeded. + Note: Following Hyperliquid/Pacifica pattern, we do NOT implement + automatic retry. The caller is responsible for handling this. + """ + + def __init__(self, message: str = "Rate limit exceeded"): + super().__init__(429, message) + + +class ExtendedValidationError(ExtendedError): + """ + Validation error for request parameters. + + Raised when request parameters fail validation before being sent to the API. + """ + + pass + + +class ExtendedNotFoundError(ExtendedAPIError): + """ + Resource not found error (HTTP 404). + + Raised when a requested resource (order, position, etc.) is not found. + """ + + def __init__(self, message: str = "Resource not found"): + super().__init__(404, message) diff --git a/extended/exceptions_sync.py b/extended/exceptions_sync.py new file mode 100644 index 0000000..be0e814 --- /dev/null +++ b/extended/exceptions_sync.py @@ -0,0 +1,39 @@ +""" +Simple sync exceptions for Extended Exchange SDK. + +Minimal exception classes to avoid dependencies. 
+""" + + +class ExtendedError(Exception): + """Base exception for Extended SDK.""" + pass + + +class ExtendedAPIError(ExtendedError): + """API-related errors.""" + def __init__(self, status_code: int, message: str, data=None): + self.status_code = status_code + self.message = message + self.data = data + super().__init__(message) + + +class ExtendedAuthError(ExtendedError): + """Authentication-related errors.""" + pass + + +class ExtendedRateLimitError(ExtendedError): + """Rate limiting errors.""" + pass + + +class ExtendedValidationError(ExtendedError): + """Validation errors.""" + pass + + +class ExtendedNotFoundError(ExtendedError): + """Not found errors.""" + pass \ No newline at end of file diff --git a/extended/setup.py b/extended/setup.py new file mode 100644 index 0000000..6719edd --- /dev/null +++ b/extended/setup.py @@ -0,0 +1,78 @@ +""" +Hyperliquid-style setup functions for Native Sync Extended Exchange SDK. + +Provides setup() function that returns (address, info, exchange) tuples, +matching Hyperliquid/Pacifica's interface exactly. +NATIVE SYNC ONLY - no async dependencies. +""" + +from typing import Optional, Tuple + +from extended.api.exchange import ExchangeAPI +from extended.api.info import InfoAPI +from extended.client import Client + + +def setup( + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, +) -> Tuple[str, InfoAPI, ExchangeAPI]: + """ + Initialize Extended SDK with Hyperliquid-compatible return format - NATIVE SYNC. + + This is a convenience function that mirrors the Hyperliquid/Pacifica setup() + pattern, returning a tuple of (address, info, exchange) for easy integration + with existing trading engines. + + NATIVE SYNC IMPLEMENTATION - pure synchronous. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet environment (default False) + base_url: Optional custom API base URL + + Returns: + Tuple[str, InfoAPI, ExchangeAPI]: A tuple containing: + - address: The stark public key (used as identifier) + - info: InfoAPI instance for read operations (NATIVE SYNC) + - exchange: ExchangeAPI instance for trading operations (NATIVE SYNC) + + Example: + from extended import setup + + address, info, exchange = setup( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True + ) + + # Now use exactly like Hyperliquid/Pacifica - NATIVE SYNC + state = info.user_state() # Pure sync - no waiting needed + exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) # Pure sync + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. 
+ """ + # Create native sync client - NO async dependencies + client = Client( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + base_url=base_url, + ) + + # Return (address, info, exchange) tuple like Hyperliquid/Pacifica + # All components are NATIVE SYNC + return (client.public_key, client.info, client.exchange) \ No newline at end of file diff --git a/extended/setup_native_sync.py b/extended/setup_native_sync.py new file mode 100644 index 0000000..bfda056 --- /dev/null +++ b/extended/setup_native_sync.py @@ -0,0 +1,78 @@ +""" +Hyperliquid-style setup functions for Native Sync Extended Exchange SDK. + +Provides setup() function that returns (address, info, exchange) tuples, +matching Hyperliquid/Pacifica's interface exactly. +NATIVE SYNC ONLY - no async dependencies. +""" + +from typing import Optional, Tuple + +from extended.api.exchange import ExchangeAPI +from extended.api.info import InfoAPI +from extended.client import Client + + +def setup( + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, +) -> Tuple[str, InfoAPI, ExchangeAPI]: + """ + Initialize Extended SDK with Hyperliquid-compatible return format - NATIVE SYNC. + + This is a convenience function that mirrors the Hyperliquid/Pacifica setup() + pattern, returning a tuple of (address, info, exchange) for easy integration + with existing trading engines. + + NATIVE SYNC IMPLEMENTATION - no async/await dependencies. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet environment (default False) + base_url: Optional custom API base URL + + Returns: + Tuple[str, InfoAPI, ExchangeAPI]: A tuple containing: + - address: The stark public key (used as identifier) + - info: InfoAPI instance for read operations (NATIVE SYNC) + - exchange: ExchangeAPI instance for trading operations (NATIVE SYNC) + + Example: + from extended import setup + + address, info, exchange = setup( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True + ) + + # Now use exactly like Hyperliquid/Pacifica - NATIVE SYNC + state = info.user_state() # No await needed + exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) # No await needed + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. + """ + # Create native sync client - NO async dependencies + client = Client( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + base_url=base_url, + ) + + # Return (address, info, exchange) tuple like Hyperliquid/Pacifica + # All components are NATIVE SYNC + return (client.public_key, client.info, client.exchange) \ No newline at end of file diff --git a/extended/setup_old_async.py b/extended/setup_old_async.py new file mode 100644 index 0000000..3d4bb09 --- /dev/null +++ b/extended/setup_old_async.py @@ -0,0 +1,136 @@ +""" +Hyperliquid-style setup functions for Extended Exchange SDK. + +Provides setup() and async_setup() functions that return +(address, info, exchange) tuples, matching Hyperliquid/Pacifica's interface. 
+""" + +from typing import Optional, Tuple + +from extended.api.exchange import ExchangeAPI +from extended.api.exchange_async import AsyncExchangeAPI +from extended.api.info import InfoAPI +from extended.api.info_async import AsyncInfoAPI +from extended.client import Client +from extended.async_client import AsyncClient + + +def setup( + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, +) -> Tuple[str, InfoAPI, ExchangeAPI]: + """ + Initialize Extended SDK with Hyperliquid-compatible return format. + + This is a convenience function that mirrors the Hyperliquid/Pacifica setup() + pattern, returning a tuple of (address, info, exchange) for easy integration + with existing trading engines. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet environment (default False) + base_url: Optional custom API base URL (not currently used) + + Returns: + Tuple[str, InfoAPI, ExchangeAPI]: A tuple containing: + - address: The stark public key (used as identifier) + - info: InfoAPI instance for read operations + - exchange: ExchangeAPI instance for trading operations + + Example: + from extended import setup + + address, info, exchange = setup( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True + ) + + # Now use exactly like Hyperliquid + state = info.user_state() + exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. + """ + client = Client( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + base_url=base_url, + ) + + # Return (address, info, exchange) tuple like Hyperliquid + return (client.public_key, client.info, client.exchange) + + +def async_setup( + api_key: str, + vault: int, + stark_private_key: str, + stark_public_key: str, + testnet: bool = False, + base_url: Optional[str] = None, +) -> Tuple[str, AsyncInfoAPI, AsyncExchangeAPI]: + """ + Async version of setup() for use with AsyncClient. + + Returns a tuple of (address, info, exchange) where info and exchange + are async API instances. + + Args: + api_key: Extended Exchange API key + vault: Account/vault ID + stark_private_key: L2 Stark private key (hex string) + stark_public_key: L2 Stark public key (hex string) + testnet: Use testnet environment (default False) + base_url: Optional custom API base URL (not currently used) + + Returns: + Tuple[str, AsyncInfoAPI, AsyncExchangeAPI]: A tuple containing: + - address: The stark public key (used as identifier) + - info: AsyncInfoAPI instance for async read operations + - exchange: AsyncExchangeAPI instance for async trading operations + + Example: + from extended import async_setup + + address, info, exchange = async_setup( + api_key="your-api-key", + vault=12345, + stark_private_key="0x...", + stark_public_key="0x...", + testnet=True + ) + + # Now use exactly like Hyperliquid (async) + state = await info.user_state() + await exchange.order("BTC", is_buy=True, sz=0.01, limit_px=50000) + + Note: + Credentials (api_key, vault, stark keys) must be obtained from your + onboarding infrastructure. This SDK does not perform onboarding. 
+ """ + client = AsyncClient( + api_key=api_key, + vault=vault, + stark_private_key=stark_private_key, + stark_public_key=stark_public_key, + testnet=testnet, + base_url=base_url, + ) + + # Return (address, info, exchange) tuple like Hyperliquid + return (client.public_key, client.info, client.exchange) diff --git a/extended/transformers/__init__.py b/extended/transformers/__init__.py new file mode 100644 index 0000000..5a76a2d --- /dev/null +++ b/extended/transformers/__init__.py @@ -0,0 +1,13 @@ +""" +Response transformers to convert Extended responses to Hyperliquid format. +""" + +from extended.transformers.account import AccountTransformer +from extended.transformers.market import MarketTransformer +from extended.transformers.order import OrderTransformer + +__all__ = [ + "AccountTransformer", + "MarketTransformer", + "OrderTransformer", +] diff --git a/extended/transformers/account.py b/extended/transformers/account.py new file mode 100644 index 0000000..24ad037 --- /dev/null +++ b/extended/transformers/account.py @@ -0,0 +1,126 @@ +""" +Account and position transformers. + +Converts Extended balance and position data to Hyperliquid format. +""" + +from decimal import Decimal +from typing import Any, Dict, List, Optional + +from x10.perpetual.balances import BalanceModel +from x10.perpetual.positions import PositionModel + +from extended.utils.helpers import to_hyperliquid_market_name + + +class AccountTransformer: + """Transform Extended account data to Hyperliquid format.""" + + @staticmethod + def transform_user_state( + balance: BalanceModel, + positions: List[PositionModel], + ) -> Dict[str, Any]: + """ + Transform Extended balance + positions to Hyperliquid user_state format. + + Args: + balance: Extended BalanceModel + positions: List of Extended PositionModel + + Returns: + Dict in Hyperliquid user_state format + """ + # Calculate totals + total_position_value = sum(pos.value for pos in positions) + + # Transform positions + asset_positions = [] + for pos in positions: + asset_positions.append( + AccountTransformer.transform_position(pos) + ) + + return { + "assetPositions": asset_positions, + "crossMaintenanceMarginUsed": str(balance.initial_margin), + "crossMarginSummary": { + "accountValue": str(balance.equity), + "totalMarginUsed": str(balance.initial_margin), + "totalNtlPos": str(total_position_value), + "totalRawUsd": str(balance.balance), + }, + "marginSummary": { + "accountValue": str(balance.equity), + "totalMarginUsed": str(balance.initial_margin), + "totalNtlPos": str(total_position_value), + "totalRawUsd": str(balance.balance), + "withdrawable": str(balance.available_for_trade), + }, + "withdrawable": str(balance.available_for_trade), + } + + @staticmethod + def transform_position(position: PositionModel) -> Dict[str, Any]: + """ + Transform Extended position to Hyperliquid assetPosition format. 
+ + Args: + position: Extended PositionModel + + Returns: + Dict in Hyperliquid assetPosition format + """ + # Signed size: positive for LONG, negative for SHORT + size = position.size + # Handle both enum and string types for side + side_value = position.side.value if hasattr(position.side, 'value') else position.side + szi = str(size) if side_value == "LONG" else str(-size) + + leverage = int(position.leverage) + margin_used = position.value / leverage if leverage > 0 else Decimal(0) + roe = ( + position.unrealised_pnl / margin_used + if margin_used > 0 + else Decimal(0) + ) + + return { + "position": { + "coin": to_hyperliquid_market_name(position.market), + "szi": szi, + "leverage": {"type": "cross", "value": leverage}, + "entryPx": str(position.open_price), + "positionValue": str(position.value), + "unrealizedPnl": str(position.unrealised_pnl), + "liquidationPx": ( + str(position.liquidation_price) + if position.liquidation_price + else None + ), + "marginUsed": str(margin_used), + "returnOnEquity": str(roe), + }, + "type": "oneWay", + } + + @staticmethod + def transform_balance(balance: BalanceModel) -> Dict[str, Any]: + """ + Transform Extended balance to a simple balance dict. + + Args: + balance: Extended BalanceModel + + Returns: + Dict with balance information + """ + return { + "balance": str(balance.balance), + "equity": str(balance.equity), + "available_for_trade": str(balance.available_for_trade), + "available_for_withdrawal": str(balance.available_for_withdrawal), + "unrealised_pnl": str(balance.unrealised_pnl), + "initial_margin": str(balance.initial_margin), + "margin_ratio": str(balance.margin_ratio), + } diff --git a/extended/transformers/market.py b/extended/transformers/market.py new file mode 100644 index 0000000..65ac78b --- /dev/null +++ b/extended/transformers/market.py @@ -0,0 +1,162 @@ +""" +Market data transformers. + +Converts Extended market data to Hyperliquid format. +""" + +import time +from decimal import Decimal +from typing import Any, Dict, List, Optional + +from x10.perpetual.candles import CandleModel +from x10.perpetual.markets import MarketModel, MarketStatsModel +from x10.perpetual.orderbooks import OrderbookUpdateModel + +from extended.utils.constants import INTERVAL_MS +from extended.utils.helpers import ( + calculate_sz_decimals, + to_hyperliquid_market_name, +) + + +class MarketTransformer: + """Transform Extended market data to Hyperliquid format.""" + + @staticmethod + def transform_meta(markets: List[MarketModel]) -> Dict[str, Any]: + """ + Transform Extended markets list to Hyperliquid meta format. + + Args: + markets: List of Extended MarketModel + + Returns: + Dict in Hyperliquid meta format with universe list + """ + universe = [] + for market in markets: + if not market.active: + continue + + trading_config = market.trading_config + sz_decimals = calculate_sz_decimals(trading_config.min_order_size_change) + + universe.append({ + "name": to_hyperliquid_market_name(market.name), + "szDecimals": sz_decimals, + "maxLeverage": int(trading_config.max_leverage), + "onlyIsolated": False, # Extended only supports cross margin + }) + + return {"universe": universe} + + @staticmethod + def transform_all_mids(markets: List[MarketModel]) -> Dict[str, str]: + """ + Transform Extended markets to mid prices dict. 
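+
+        The mid is the simple average of best bid and ask; for example
+        (illustrative), bid_price=49999.5 and ask_price=50001.5 yield
+        {"BTC": "50000.5"}.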
+ + Args: + markets: List of Extended MarketModel (with stats) + + Returns: + Dict mapping coin name to mid price string + """ + mids = {} + for market in markets: + stats = market.market_stats + bid = stats.bid_price + ask = stats.ask_price + mid = (bid + ask) / 2 + + coin = to_hyperliquid_market_name(market.name) + mids[coin] = str(mid) + + return mids + + @staticmethod + def transform_l2_snapshot( + orderbook: OrderbookUpdateModel, + timestamp: Optional[int] = None, + ) -> Dict[str, Any]: + """ + Transform Extended orderbook to Hyperliquid l2_snapshot format. + + Args: + orderbook: Extended OrderbookUpdateModel + timestamp: Optional timestamp (defaults to current time) + + Returns: + Dict in Hyperliquid l2_snapshot format + """ + def transform_levels(levels: List[Any]) -> List[Dict[str, Any]]: + return [ + {"px": str(level.price), "sz": str(level.qty), "n": 1} + for level in levels + ] + + return { + "coin": to_hyperliquid_market_name(orderbook.market), + "levels": [ + transform_levels(orderbook.bid), # bids (index 0) + transform_levels(orderbook.ask), # asks (index 1) + ], + "time": timestamp or int(time.time() * 1000), + } + + @staticmethod + def transform_candles( + candles: List[CandleModel], + coin: str, + interval: str, + ) -> List[Dict[str, Any]]: + """ + Transform Extended candles to Hyperliquid format. + + Args: + candles: List of Extended CandleModel + coin: Coin name in Hyperliquid format (e.g., "BTC") + interval: Interval in Hyperliquid format (e.g., "1m") + + Returns: + List of candles in Hyperliquid format + """ + interval_ms = INTERVAL_MS.get(interval, 60000) + + return [ + { + "t": candle.timestamp, + "T": candle.timestamp + interval_ms, + "s": coin, + "i": interval, + "o": str(candle.open), + "c": str(candle.close), + "h": str(candle.high), + "l": str(candle.low), + "v": str(candle.volume) if candle.volume else "0", + "n": 0, # Number of trades (not available from Extended) + } + for candle in candles + ] + + @staticmethod + def transform_market_stats(stats: MarketStatsModel) -> Dict[str, Any]: + """ + Transform Extended market stats to a dict. + + Args: + stats: Extended MarketStatsModel + + Returns: + Dict with market statistics + """ + return { + "last_price": str(stats.last_price), + "mark_price": str(stats.mark_price), + "index_price": str(stats.index_price), + "bid_price": str(stats.bid_price), + "ask_price": str(stats.ask_price), + "funding_rate": str(stats.funding_rate), + "open_interest": str(stats.open_interest), + "daily_volume": str(stats.daily_volume), + "daily_price_change": str(stats.daily_price_change), + } diff --git a/extended/transformers/order.py b/extended/transformers/order.py new file mode 100644 index 0000000..899f45e --- /dev/null +++ b/extended/transformers/order.py @@ -0,0 +1,245 @@ +""" +Order and trade transformers. + +Converts Extended order and trade data to Hyperliquid format. +""" + +from decimal import Decimal +from typing import Any, Dict, List, Optional + +from x10.perpetual.orders import OpenOrderModel, PlacedOrderModel +from x10.perpetual.trades import AccountTradeModel + +from extended.utils.constants import SIDE_TO_HL +from extended.utils.helpers import to_hyperliquid_market_name + + +class OrderTransformer: + """Transform Extended order data to Hyperliquid format.""" + + @staticmethod + def transform_open_orders(orders: List[OpenOrderModel]) -> List[Dict[str, Any]]: + """ + Transform Extended orders to Hyperliquid open_orders format. 
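+
+        For example (illustrative values): an order with qty=1.0 and
+        filled_qty=0.25 maps to origSz="1.0" and sz="0.75" (remaining size).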
+ + Args: + orders: List of Extended OpenOrderModel + + Returns: + List of orders in Hyperliquid format + """ + return [ + OrderTransformer.transform_open_order(order) + for order in orders + ] + + @staticmethod + def transform_open_order(order: OpenOrderModel) -> Dict[str, Any]: + """ + Transform a single Extended order to Hyperliquid format. + + Args: + order: Extended OpenOrderModel + + Returns: + Dict in Hyperliquid open order format + """ + # Calculate remaining size + filled_qty = order.filled_qty if order.filled_qty else Decimal(0) + remaining_sz = order.qty - filled_qty + + # Handle both enum and string types for side + side_value = order.side.value if hasattr(order.side, 'value') else order.side + return { + "coin": to_hyperliquid_market_name(order.market), + "side": SIDE_TO_HL.get(side_value, "B"), + "limitPx": str(order.price), + "sz": str(remaining_sz), + "oid": order.id, + "timestamp": order.created_time, + "origSz": str(order.qty), + "cloid": order.external_id if order.external_id else None, + } + + @staticmethod + def transform_user_fills(trades: List[AccountTradeModel]) -> List[Dict[str, Any]]: + """ + Transform Extended trades to Hyperliquid user_fills format. + + Args: + trades: List of Extended AccountTradeModel + + Returns: + List of fills in Hyperliquid format + """ + return [ + OrderTransformer.transform_fill(trade) + for trade in trades + ] + + @staticmethod + def transform_fill(trade: AccountTradeModel) -> Dict[str, Any]: + """ + Transform a single Extended trade to Hyperliquid fill format. + + Note: The `cloid` field will be None because Extended's trades endpoint + does not include the client order ID (external_id). + + Args: + trade: Extended AccountTradeModel + + Returns: + Dict in Hyperliquid fill format + """ + # Handle both enum and string types for side and trade_type + side_value = trade.side.value if hasattr(trade.side, 'value') else trade.side + trade_type_value = trade.trade_type.value if hasattr(trade.trade_type, 'value') else trade.trade_type + return { + "coin": to_hyperliquid_market_name(trade.market), + "px": str(trade.price), + "sz": str(trade.qty), + "side": SIDE_TO_HL.get(side_value, "B"), + "time": trade.created_time, + "startPosition": "0", # Not available from Extended + "dir": "Trade", # Can't determine Open/Close from Extended trades + "closedPnl": "0", # Not in trade response + "hash": str(trade.id), + "oid": trade.order_id, + "crossed": trade.is_taker, + "fee": str(trade.fee), + "tid": trade.id, + "liquidation": trade_type_value == "LIQUIDATION", + "cloid": None, # Not available from Extended trades endpoint + } + + @staticmethod + def transform_order_response( + placed_order: PlacedOrderModel, + ) -> Dict[str, Any]: + """ + Transform Extended order placement response to Hyperliquid format. + + Args: + placed_order: Extended PlacedOrderModel + + Returns: + Dict in Hyperliquid order response format + """ + return { + "status": "ok", + "response": { + "type": "order", + "data": { + "statuses": [ + { + "resting": { + "oid": placed_order.id, + "cloid": placed_order.external_id, + } + } + ] + }, + }, + } + + @staticmethod + def transform_cancel_response( + success: bool = True, + order_id: Optional[int] = None, + ) -> Dict[str, Any]: + """ + Transform Extended cancel response to Hyperliquid format. 
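+
+        Illustrative sketch:
+
+            OrderTransformer.transform_cancel_response(success=True)
+            # {"status": "ok",
+            #  "response": {"type": "cancel",
+            #               "data": {"statuses": ["success"]}}}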
+ + Args: + success: Whether the cancel was successful + order_id: The order ID that was cancelled + + Returns: + Dict in Hyperliquid cancel response format + """ + if success: + return { + "status": "ok", + "response": { + "type": "cancel", + "data": { + "statuses": ["success"] + }, + }, + } + else: + return { + "status": "err", + "response": "Cancel failed", + } + + @staticmethod + def transform_error_response( + message: str, + ) -> Dict[str, Any]: + """ + Transform an error to Hyperliquid error format. + + Args: + message: Error message + + Returns: + Dict in Hyperliquid error response format + """ + return { + "status": "err", + "response": message, + } + + @staticmethod + def transform_bulk_orders_response( + results: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """ + Transform bulk order results to Hyperliquid format. + + Args: + results: List of individual order results + + Returns: + Dict in Hyperliquid bulk order response format + """ + statuses = [] + for result in results: + if result.get("status") == "ok": + data = result.get("data", {}) + statuses.append({ + "resting": { + "oid": data.get("id"), + "cloid": data.get("external_id"), + } + }) + else: + statuses.append({ + "error": result.get("error", "Unknown error") + }) + + return { + "status": "ok", + "response": { + "type": "order", + "data": { + "statuses": statuses + }, + }, + } + + @staticmethod + def transform_leverage_response() -> Dict[str, Any]: + """ + Transform leverage update response to Hyperliquid format. + + Returns: + Dict in Hyperliquid leverage response format + """ + return { + "status": "ok", + "response": { + "type": "leverage", + }, + } diff --git a/extended/transformers_sync.py b/extended/transformers_sync.py new file mode 100644 index 0000000..b5a2f7b --- /dev/null +++ b/extended/transformers_sync.py @@ -0,0 +1,478 @@ +""" +Native Sync Transformers for Extended Exchange SDK. + +Converts raw API JSON responses (dict data) to Hyperliquid format. +No X10 model dependencies - works entirely with raw dicts. 
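+
+Usage (a minimal sketch; `balance_data`, `positions_data`, and
+`markets_data` are placeholders for raw JSON payloads already fetched
+from the Extended REST API):
+
+    state = SyncAccountTransformer.transform_user_state(
+        balance_data, positions_data
+    )
+    mids = SyncMarketTransformer.transform_all_mids(markets_data)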
+""" + +import time +from decimal import Decimal +from typing import Any, Dict, List, Optional + + +# ============================================================================ +# CONSTANTS (duplicated to avoid X10 dependencies) +# ============================================================================ + +SIDE_TO_HL = { + "BUY": "B", + "SELL": "A", + "LONG": "B", + "SHORT": "A", +} + +INTERVAL_MS = { + "1m": 60000, + "5m": 300000, + "15m": 900000, + "30m": 1800000, + "1h": 3600000, + "2h": 7200000, + "4h": 14400000, + "1d": 86400000, +} + + +def normalize_market_name(name: str) -> str: + """Convert market name to Extended format (BTC -> BTC-USD).""" + if "-" not in name: + return f"{name}-USD" + return name + + +def to_hyperliquid_market_name(name: str) -> str: + """Convert Extended market name to Hyperliquid format (BTC-USD -> BTC).""" + return name.replace("-USD", "") + + +def calculate_sz_decimals(min_order_size_change) -> int: + """Calculate size decimals from minimum order size change.""" + if not min_order_size_change: + return 0 + val = Decimal(str(min_order_size_change)) + if val <= 0: + return 0 + return abs(int(val.log10())) + + +# ============================================================================ +# ACCOUNT TRANSFORMERS +# ============================================================================ + +class SyncAccountTransformer: + """Transform raw API account data to Hyperliquid format.""" + + @staticmethod + def transform_user_state( + balance_data: Dict[str, Any], + positions_data: List[Dict[str, Any]], + ) -> Dict[str, Any]: + """ + Transform raw balance + positions to Hyperliquid user_state format. + + Args: + balance_data: Raw balance dict from API + positions_data: Raw positions list from API + + Returns: + Dict in Hyperliquid user_state format + """ + # Extract balance fields with defaults + equity = balance_data.get("equity", "0") + balance = balance_data.get("balance", "0") + initial_margin = balance_data.get("initialMargin", balance_data.get("initial_margin", "0")) + available_for_trade = balance_data.get("availableForTrade", balance_data.get("available_for_trade", "0")) + + # Calculate total position value + total_position_value = Decimal("0") + for pos in positions_data: + val = pos.get("value", "0") + total_position_value += Decimal(str(val)) if val else Decimal("0") + + # Transform positions + asset_positions = [ + SyncAccountTransformer.transform_position(pos) + for pos in positions_data + ] + + return { + "assetPositions": asset_positions, + "crossMaintenanceMarginUsed": str(initial_margin), + "crossMarginSummary": { + "accountValue": str(equity), + "totalMarginUsed": str(initial_margin), + "totalNtlPos": str(total_position_value), + "totalRawUsd": str(balance), + }, + "marginSummary": { + "accountValue": str(equity), + "totalMarginUsed": str(initial_margin), + "totalNtlPos": str(total_position_value), + "totalRawUsd": str(balance), + "withdrawable": str(available_for_trade), + }, + "withdrawable": str(available_for_trade), + } + + @staticmethod + def transform_position(position: Dict[str, Any]) -> Dict[str, Any]: + """ + Transform raw position to Hyperliquid assetPosition format. 
+ + Args: + position: Raw position dict from API + + Returns: + Dict in Hyperliquid assetPosition format + """ + market = position.get("market", "") + size = Decimal(str(position.get("size", "0"))) + side = position.get("side", "LONG") + leverage = int(position.get("leverage", 1)) + value = Decimal(str(position.get("value", "0"))) + open_price = position.get("openPrice", position.get("open_price", "0")) + unrealised_pnl = Decimal(str(position.get("unrealisedPnl", position.get("unrealised_pnl", "0")))) + liquidation_price = position.get("liquidationPrice", position.get("liquidation_price")) + + # Signed size: positive for LONG, negative for SHORT + szi = str(size) if side == "LONG" else str(-size) + + # Calculate margin and ROE + margin_used = value / leverage if leverage > 0 else Decimal("0") + roe = unrealised_pnl / margin_used if margin_used > 0 else Decimal("0") + + return { + "position": { + "coin": to_hyperliquid_market_name(market), + "szi": szi, + "leverage": {"type": "cross", "value": leverage}, + "entryPx": str(open_price), + "positionValue": str(value), + "unrealizedPnl": str(unrealised_pnl), + "liquidationPx": str(liquidation_price) if liquidation_price else None, + "marginUsed": str(margin_used), + "returnOnEquity": str(roe), + }, + "type": "oneWay", + } + + +# ============================================================================ +# MARKET TRANSFORMERS +# ============================================================================ + +class SyncMarketTransformer: + """Transform raw API market data to Hyperliquid format.""" + + @staticmethod + def transform_meta(markets_data: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Transform raw markets list to Hyperliquid meta format. + + Args: + markets_data: Raw markets list from API + + Returns: + Dict in Hyperliquid meta format with universe list + """ + universe = [] + for market in markets_data: + if not market.get("active", True): + continue + + trading_config = market.get("tradingConfig", market.get("trading_config", {})) + min_order_size_change = trading_config.get("minOrderSizeChange", trading_config.get("min_order_size_change", "0.001")) + max_leverage = trading_config.get("maxLeverage", trading_config.get("max_leverage", "50")) + sz_decimals = calculate_sz_decimals(min_order_size_change) + + # Handle max_leverage as string with decimal (e.g., "50.00") + try: + max_lev_int = int(float(str(max_leverage))) + except (ValueError, TypeError): + max_lev_int = 50 + + universe.append({ + "name": to_hyperliquid_market_name(market.get("name", "")), + "szDecimals": sz_decimals, + "maxLeverage": max_lev_int, + "onlyIsolated": False, + }) + + return {"universe": universe} + + @staticmethod + def transform_all_mids(markets_data: List[Dict[str, Any]]) -> Dict[str, str]: + """ + Transform raw markets to mid prices dict. 
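+
+        If either side of the book is missing, the mid falls back to "0"
+        instead of raising. Illustrative sketch (hypothetical payload):
+
+            SyncMarketTransformer.transform_all_mids([
+                {"name": "BTC-USD",
+                 "marketStats": {"bidPrice": "49990", "askPrice": "50010"}},
+            ])
+            # {"BTC": "50000"}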
+ + Args: + markets_data: Raw markets list from API (with stats) + + Returns: + Dict mapping coin name to mid price string + """ + mids = {} + for market in markets_data: + stats = market.get("marketStats", market.get("market_stats", {})) + bid = Decimal(str(stats.get("bidPrice", stats.get("bid_price", "0")))) + ask = Decimal(str(stats.get("askPrice", stats.get("ask_price", "0")))) + mid = (bid + ask) / 2 if (bid and ask) else Decimal("0") + + coin = to_hyperliquid_market_name(market.get("name", "")) + mids[coin] = str(mid) + + return mids + + @staticmethod + def transform_l2_snapshot( + orderbook_data: Dict[str, Any], + timestamp: Optional[int] = None, + ) -> Dict[str, Any]: + """ + Transform raw orderbook to Hyperliquid l2_snapshot format. + + Args: + orderbook_data: Raw orderbook dict from API + timestamp: Optional timestamp (defaults to current time) + + Returns: + Dict in Hyperliquid l2_snapshot format + """ + def transform_levels(levels: List[Dict]) -> List[Dict[str, Any]]: + return [ + {"px": str(level.get("price", "0")), "sz": str(level.get("qty", "0")), "n": 1} + for level in levels + ] + + market = orderbook_data.get("market", "") + bids = orderbook_data.get("bid", orderbook_data.get("bids", [])) + asks = orderbook_data.get("ask", orderbook_data.get("asks", [])) + + return { + "coin": to_hyperliquid_market_name(market), + "levels": [ + transform_levels(bids), + transform_levels(asks), + ], + "time": timestamp or int(time.time() * 1000), + } + + @staticmethod + def transform_candles( + candles_data: List[Dict[str, Any]], + coin: str, + interval: str, + ) -> List[Dict[str, Any]]: + """ + Transform raw candles to Hyperliquid format. + + Args: + candles_data: Raw candles list from API + coin: Coin name in Hyperliquid format (e.g., "BTC") + interval: Interval in Hyperliquid format (e.g., "1m") + + Returns: + List of candles in Hyperliquid format + """ + interval_ms = INTERVAL_MS.get(interval, 60000) + + return [ + { + "t": candle.get("timestamp", 0), + "T": candle.get("timestamp", 0) + interval_ms, + "s": coin, + "i": interval, + "o": str(candle.get("open", "0")), + "c": str(candle.get("close", "0")), + "h": str(candle.get("high", "0")), + "l": str(candle.get("low", "0")), + "v": str(candle.get("volume", "0")), + "n": 0, + } + for candle in candles_data + ] + + +# ============================================================================ +# ORDER TRANSFORMERS +# ============================================================================ + +class SyncOrderTransformer: + """Transform raw API order data to Hyperliquid format.""" + + @staticmethod + def transform_open_orders(orders_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Transform raw orders to Hyperliquid open_orders format. + + Args: + orders_data: Raw orders list from API + + Returns: + List of orders in Hyperliquid format + """ + return [ + SyncOrderTransformer.transform_open_order(order) + for order in orders_data + ] + + @staticmethod + def transform_open_order(order: Dict[str, Any]) -> Dict[str, Any]: + """ + Transform a single raw order to Hyperliquid format. 
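+
+        The returned `sz` is the remaining size (qty - filledQty), while
+        `origSz` keeps the original quantity. Illustrative sketch
+        (hypothetical payload):
+
+            order = {"market": "ETH-USD", "qty": "2", "filledQty": "0.5",
+                     "side": "SELL", "price": "3000", "id": 42}
+            SyncOrderTransformer.transform_open_order(order)["sz"]
+            # "1.5"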
+ + Args: + order: Raw order dict from API + + Returns: + Dict in Hyperliquid open order format + """ + qty = Decimal(str(order.get("qty", "0"))) + filled_qty = Decimal(str(order.get("filledQty", order.get("filled_qty", "0")))) + remaining_sz = qty - filled_qty + + side = order.get("side", "BUY") + market = order.get("market", "") + price = order.get("price", "0") + order_id = order.get("id", 0) + created_time = order.get("createdTime", order.get("created_time", 0)) + external_id = order.get("externalId", order.get("external_id")) + + return { + "coin": to_hyperliquid_market_name(market), + "side": SIDE_TO_HL.get(side, "B"), + "limitPx": str(price), + "sz": str(remaining_sz), + "oid": order_id, + "timestamp": created_time, + "origSz": str(qty), + "cloid": external_id, + } + + @staticmethod + def transform_user_fills(trades_data: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """ + Transform raw trades to Hyperliquid user_fills format. + + Args: + trades_data: Raw trades list from API + + Returns: + List of fills in Hyperliquid format + """ + return [ + SyncOrderTransformer.transform_fill(trade) + for trade in trades_data + ] + + @staticmethod + def transform_fill(trade: Dict[str, Any]) -> Dict[str, Any]: + """ + Transform a single raw trade to Hyperliquid fill format. + + Args: + trade: Raw trade dict from API + + Returns: + Dict in Hyperliquid fill format + """ + market = trade.get("market", "") + side = trade.get("side", "BUY") + trade_type = trade.get("tradeType", trade.get("trade_type", "TRADE")) + is_taker = trade.get("isTaker", trade.get("is_taker", False)) + + return { + "coin": to_hyperliquid_market_name(market), + "px": str(trade.get("price", "0")), + "sz": str(trade.get("qty", "0")), + "side": SIDE_TO_HL.get(side, "B"), + "time": trade.get("createdTime", trade.get("created_time", 0)), + "startPosition": "0", + "dir": "Trade", + "closedPnl": "0", + "hash": str(trade.get("id", "")), + "oid": trade.get("orderId", trade.get("order_id", 0)), + "crossed": is_taker, + "fee": str(trade.get("fee", "0")), + "tid": trade.get("id", 0), + "liquidation": trade_type == "LIQUIDATION", + "cloid": None, + } + + @staticmethod + def transform_order_response(response_data: Dict[str, Any]) -> Dict[str, Any]: + """ + Transform raw order placement response to Hyperliquid format. 
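+
+        Illustrative sketch (hypothetical payload):
+
+            SyncOrderTransformer.transform_order_response({"id": 7})
+            # {"status": "ok",
+            #  "response": {"type": "order",
+            #               "data": {"statuses": [{"resting":
+            #                   {"oid": 7, "cloid": None}}]}}}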
+ + Args: + response_data: Raw response dict from API + + Returns: + Dict in Hyperliquid order response format + """ + order_id = response_data.get("id", response_data.get("orderId", 0)) + external_id = response_data.get("externalId", response_data.get("external_id")) + + return { + "status": "ok", + "response": { + "type": "order", + "data": { + "statuses": [ + { + "resting": { + "oid": order_id, + "cloid": external_id, + } + } + ] + }, + }, + } + + @staticmethod + def transform_cancel_response(success: bool = True, order_id: Optional[int] = None) -> Dict[str, Any]: + """Transform cancel response to Hyperliquid format.""" + if success: + return { + "status": "ok", + "response": { + "type": "cancel", + "data": {"statuses": ["success"]}, + }, + } + return {"status": "err", "response": "Cancel failed"} + + @staticmethod + def transform_error_response(message: str) -> Dict[str, Any]: + """Transform an error to Hyperliquid error format.""" + return {"status": "err", "response": message} + + @staticmethod + def transform_bulk_orders_response(results: List[Dict[str, Any]]) -> Dict[str, Any]: + """Transform bulk order results to Hyperliquid format.""" + statuses = [] + for result in results: + if result.get("status") == "ok": + data = result.get("data", {}) + statuses.append({ + "resting": { + "oid": data.get("id"), + "cloid": data.get("external_id"), + } + }) + else: + statuses.append({"error": result.get("error", "Unknown error")}) + + return { + "status": "ok", + "response": { + "type": "order", + "data": {"statuses": statuses}, + }, + } + + @staticmethod + def transform_leverage_response() -> Dict[str, Any]: + """Transform leverage update response to Hyperliquid format.""" + return {"status": "ok", "response": {"type": "leverage"}} diff --git a/extended/types.py b/extended/types.py new file mode 100644 index 0000000..b0ba34d --- /dev/null +++ b/extended/types.py @@ -0,0 +1,326 @@ +""" +Type definitions for Extended Exchange SDK. + +Provides type aliases and dataclasses for the Hyperliquid-compatible interface. 
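+
+Usage (a minimal sketch):
+
+    side = Side.from_is_buy(True)                  # Side.BUY ("B")
+    spec = OrderTypeSpec.from_dict({"limit": {"tif": "Ioc"}})
+    builder = BuilderInfo.from_dict({"b": "123", "f": 10})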
+""" + +from dataclasses import dataclass +from decimal import Decimal +from enum import Enum +from typing import Any, Dict, List, Literal, Optional, TypedDict, Union + + +# Re-export x10 types for internal use +from x10.perpetual.orders import ( + OrderSide, + OrderType as X10OrderType, + TimeInForce as X10TimeInForce, + OrderStatus, + SelfTradeProtectionLevel, +) +from x10.perpetual.positions import PositionSide, PositionStatus +from x10.perpetual.candles import CandleType, CandleInterval + + +class Side(str, Enum): + """Order side in Hyperliquid format.""" + + BUY = "B" + SELL = "A" + + @classmethod + def from_is_buy(cls, is_buy: bool) -> "Side": + """Convert is_buy boolean to Side.""" + return cls.BUY if is_buy else cls.SELL + + @classmethod + def from_x10_side(cls, side: Union[OrderSide, PositionSide, str]) -> "Side": + """Convert x10 side to Hyperliquid side.""" + side_str = str(side).upper() + if side_str in ("BUY", "LONG"): + return cls.BUY + return cls.SELL + + def to_is_buy(self) -> bool: + """Convert to is_buy boolean.""" + return self == Side.BUY + + +class TimeInForce(str, Enum): + """Time in force options in Hyperliquid format.""" + + GTC = "Gtc" # Good-till-cancel (maps to GTT in Extended) + IOC = "Ioc" # Immediate-or-cancel + ALO = "Alo" # Add-liquidity-only (post-only) + + def to_x10_tif(self) -> X10TimeInForce: + """Convert to x10 TimeInForce.""" + mapping = { + TimeInForce.GTC: X10TimeInForce.GTT, + TimeInForce.IOC: X10TimeInForce.IOC, + TimeInForce.ALO: X10TimeInForce.GTT, # ALO uses GTT with post_only=True + } + return mapping[self] + + @property + def is_post_only(self) -> bool: + """Check if this TIF implies post-only.""" + return self == TimeInForce.ALO + + +@dataclass +class LimitOrderType: + """Limit order type specification.""" + + tif: TimeInForce = TimeInForce.GTC + + +@dataclass +class OrderTypeSpec: + """ + Order type specification in Hyperliquid format. + + Example: + {"limit": {"tif": "Gtc"}} + {"limit": {"tif": "Ioc"}} + {"limit": {"tif": "Alo"}} + """ + + limit: LimitOrderType + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "OrderTypeSpec": + """Create from dictionary.""" + if "limit" in data: + limit_data = data["limit"] + tif = TimeInForce(limit_data.get("tif", "Gtc")) + return cls(limit=LimitOrderType(tif=tif)) + # Default to GTC limit + return cls(limit=LimitOrderType()) + + def to_dict(self) -> Dict[str, Any]: + """Convert to dictionary.""" + return {"limit": {"tif": self.limit.tif.value}} + + +@dataclass +class BuilderInfo: + """ + Builder fee information in Hyperliquid format. + + Attributes: + b: Builder ID as string + f: Fee in tenths of basis points (10 = 1 bps = 0.0001) + + Example: + BuilderInfo(b="123", f=10) # 1 bps fee to builder 123 + """ + + b: str # Builder ID + f: int # Fee in tenths of bps + + @classmethod + def from_dict(cls, data: Optional[Dict[str, Any]]) -> Optional["BuilderInfo"]: + """Create from dictionary.""" + if data is None: + return None + return cls(b=str(data["b"]), f=int(data["f"])) + + def to_dict(self) -> Dict[str, Any]: + """Convert to dictionary.""" + return {"b": self.b, "f": self.f} + + @property + def builder_id(self) -> int: + """Get builder ID as integer.""" + return int(self.b) + + @property + def fee_decimal(self) -> Decimal: + """ + Convert fee to decimal rate. 
+ + f=1 -> 0.1 bps -> 0.000001 + f=10 -> 1 bps -> 0.0001 + f=50 -> 5 bps -> 0.0005 + """ + return Decimal(self.f) / Decimal(100000) + + +# Type aliases for Hyperliquid-compatible structures + +OrderRequest = TypedDict( + "OrderRequest", + { + "name": str, + "is_buy": bool, + "sz": float, + "limit_px": float, + "order_type": Dict[str, Any], + "reduce_only": bool, + "cloid": Optional[str], + "builder": Optional[Dict[str, Any]], + }, + total=False, +) + +CancelRequest = TypedDict( + "CancelRequest", + { + "coin": str, + "oid": Optional[int], + "cloid": Optional[str], + }, + total=False, +) + + +# Hyperliquid-format response types + +class LeverageInfo(TypedDict): + """Leverage information in Hyperliquid format.""" + + type: Literal["cross", "isolated"] + value: int + + +class PositionInfo(TypedDict, total=False): + """Position information in Hyperliquid format.""" + + coin: str + szi: str # Signed size (negative for short) + leverage: LeverageInfo + entryPx: str + positionValue: str + unrealizedPnl: str + liquidationPx: Optional[str] + marginUsed: str + returnOnEquity: str + maxTradeSz: str + + +class AssetPosition(TypedDict): + """Asset position wrapper in Hyperliquid format.""" + + position: PositionInfo + type: Literal["oneWay"] + + +class MarginSummary(TypedDict): + """Margin summary in Hyperliquid format.""" + + accountValue: str + totalMarginUsed: str + totalNtlPos: str + totalRawUsd: str + + +class UserState(TypedDict, total=False): + """User state response in Hyperliquid format.""" + + assetPositions: List[AssetPosition] + crossMaintenanceMarginUsed: str + crossMarginSummary: MarginSummary + marginSummary: Dict[str, str] + withdrawable: str + + +class OpenOrder(TypedDict, total=False): + """Open order in Hyperliquid format.""" + + coin: str + side: Literal["B", "A"] + limitPx: str + sz: str + oid: int + timestamp: int + origSz: str + cloid: Optional[str] + + +class Fill(TypedDict, total=False): + """Trade fill in Hyperliquid format.""" + + coin: str + px: str + sz: str + side: Literal["B", "A"] + time: int + startPosition: str + dir: str + closedPnl: str + hash: str + oid: int + crossed: bool + fee: str + tid: int + liquidation: bool + cloid: Optional[str] + + +class L2Level(TypedDict): + """Order book level in Hyperliquid format.""" + + px: str + sz: str + n: int + + +class L2Snapshot(TypedDict): + """Order book snapshot in Hyperliquid format.""" + + coin: str + levels: List[List[L2Level]] # [bids, asks] + time: int + + +class Candle(TypedDict): + """Candle data in Hyperliquid format.""" + + t: int # Open timestamp + T: int # Close timestamp + s: str # Symbol + i: str # Interval + o: str # Open + c: str # Close + h: str # High + l: str # Low + v: str # Volume + n: int # Number of trades + + +class UniverseItem(TypedDict): + """Market info in Hyperliquid universe format.""" + + name: str + szDecimals: int + maxLeverage: int + onlyIsolated: bool + + +class Meta(TypedDict): + """Exchange metadata in Hyperliquid format.""" + + universe: List[UniverseItem] + + +class OrderStatus(TypedDict, total=False): + """Order status in response.""" + + resting: Dict[str, Any] + filled: Dict[str, Any] + error: str + + +class OrderResponse(TypedDict): + """Order placement response in Hyperliquid format.""" + + status: Literal["ok", "err"] + response: Dict[str, Any] + + +class CancelResponse(TypedDict): + """Cancel response in Hyperliquid format.""" + + status: Literal["ok", "err"] + response: Dict[str, Any] diff --git a/extended/utils/__init__.py b/extended/utils/__init__.py new file mode 100644 index 
0000000..ecab37a --- /dev/null +++ b/extended/utils/__init__.py @@ -0,0 +1,52 @@ +""" +Utility modules for Extended Exchange SDK. +""" + +from extended.utils.async_helpers import ( + get_current_loop, + thread_safe_gather, + thread_safe_wait_for, + thread_safe_create_task, + thread_safe_wait, +) +from extended.utils.constants import ( + INTERVAL_MAPPING, + INTERVAL_MAPPING_REVERSE, + INTERVAL_MS, + SIDE_MAPPING, + SIDE_TO_HL, + TIF_MAPPING, + CANDLE_TYPES, + DEFAULT_CANDLE_TYPE, +) +from extended.utils.helpers import ( + normalize_market_name, + to_hyperliquid_market_name, + run_sync, + parse_order_type, + parse_builder, +) + +__all__ = [ + # Async helpers + "get_current_loop", + "thread_safe_gather", + "thread_safe_wait_for", + "thread_safe_create_task", + "thread_safe_wait", + # Constants + "INTERVAL_MAPPING", + "INTERVAL_MAPPING_REVERSE", + "INTERVAL_MS", + "SIDE_MAPPING", + "SIDE_TO_HL", + "TIF_MAPPING", + "CANDLE_TYPES", + "DEFAULT_CANDLE_TYPE", + # Helpers + "normalize_market_name", + "to_hyperliquid_market_name", + "run_sync", + "parse_order_type", + "parse_builder", +] diff --git a/extended/utils/async_helpers.py b/extended/utils/async_helpers.py new file mode 100644 index 0000000..c2bf925 --- /dev/null +++ b/extended/utils/async_helpers.py @@ -0,0 +1,222 @@ +""" +Thread-safe async utilities for Extended SDK. + +Ensures all async operations use the correct event loop when called +from ThreadPoolExecutor contexts in Celery workers. +""" + +import asyncio +import threading +from typing import Any, Awaitable, List, Optional, Set, Tuple, TypeVar + +import nest_asyncio + +T = TypeVar("T") + +# Thread-local storage for event loops +_thread_local = threading.local() + + +def get_current_loop() -> asyncio.AbstractEventLoop: + """ + Get the current thread's event loop, creating one if needed. + + Thread-safe version that works with run_sync() from v1.0.1. + Ensures each thread has its own event loop to prevent + "Future attached to different loop" errors. + + Returns: + The current thread's event loop + """ + # Try to get running loop first + try: + return asyncio.get_running_loop() + except RuntimeError: + pass + + # Check thread-local storage + loop = getattr(_thread_local, "loop", None) + if loop is not None and not loop.is_closed(): + return loop + + # Check if we're in main thread + is_main_thread = threading.current_thread() is threading.main_thread() + + if not is_main_thread: + # Worker thread: create thread-local loop + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + nest_asyncio.apply(loop) + _thread_local.loop = loop + else: + # Main thread: use standard approach + try: + loop = asyncio.get_event_loop() + if loop.is_closed(): + raise RuntimeError("closed") + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + nest_asyncio.apply(loop) + + return loop + + +async def thread_safe_gather( + *awaitables: Awaitable[Any], + return_exceptions: bool = False, +) -> List[Any]: + """ + Thread-safe version of asyncio.gather(). + + Ensures all tasks are created in the current thread's event loop + to prevent "Future attached to different loop" errors. 
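+
+    A minimal sketch (inside an async context; `fetch_orders` and
+    `fetch_positions` are hypothetical coroutine functions):
+
+        results = await thread_safe_gather(
+            fetch_orders(),
+            fetch_positions(),
+            return_exceptions=True,
+        )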
+ + Args: + *awaitables: Coroutines or awaitables to run concurrently + return_exceptions: If True, exceptions are returned as results + instead of being raised + + Returns: + List of results from all awaitables + """ + if not awaitables: + return [] + + # Get the current running loop (we're in async context) + current_loop = asyncio.get_running_loop() + + # Process each awaitable to ensure they're compatible with current loop + compatible_tasks = [] + for awaitable in awaitables: + if asyncio.iscoroutine(awaitable): + # Create task in current loop + task = current_loop.create_task(awaitable) + compatible_tasks.append(task) + elif hasattr(awaitable, '_loop') and awaitable._loop != current_loop: + # Task/Future from different loop - we need to recreate + # This is the critical fix for the "different loop" error + if hasattr(awaitable, '_coro') and awaitable._coro is not None: + # It's a Task with coroutine, recreate in current loop + task = current_loop.create_task(awaitable._coro) + compatible_tasks.append(task) + else: + # It's a Future or completed Task - try to await it directly + # This will raise the "different loop" error, so we catch and handle + try: + # If it's already done, get the result + if awaitable.done(): + if return_exceptions: + try: + result = awaitable.result() + compatible_tasks.append(asyncio.create_task(_return_result(result))) + except Exception as e: + compatible_tasks.append(asyncio.create_task(_return_result(e))) + else: + result = awaitable.result() + compatible_tasks.append(asyncio.create_task(_return_result(result))) + else: + # Not done and no coroutine - can't safely transfer + if return_exceptions: + error = RuntimeError(f"Cannot transfer pending future from different loop: {awaitable}") + compatible_tasks.append(asyncio.create_task(_return_result(error))) + else: + raise RuntimeError(f"Cannot transfer pending future from different loop: {awaitable}") + except Exception as e: + if return_exceptions: + compatible_tasks.append(asyncio.create_task(_return_result(e))) + else: + raise + else: + # Same loop or no loop attribute, use as-is + compatible_tasks.append(awaitable) + + return await asyncio.gather(*compatible_tasks, return_exceptions=return_exceptions) + + +async def _return_result(result: Any) -> Any: + """Helper coroutine to return a value asynchronously.""" + return result + + +async def thread_safe_wait_for( + awaitable: Awaitable[T], + timeout: Optional[float] = None, +) -> T: + """ + Thread-safe version of asyncio.wait_for(). + + Args: + awaitable: Coroutine or awaitable to run + timeout: Maximum time to wait (None for no timeout) + + Returns: + Result of the awaitable + + Raises: + asyncio.TimeoutError: If timeout is exceeded + """ + current_loop = asyncio.get_running_loop() + + if asyncio.iscoroutine(awaitable): + task = current_loop.create_task(awaitable) + else: + task = awaitable + + return await asyncio.wait_for(task, timeout=timeout) + + +def thread_safe_create_task( + coro: Awaitable[T], + *, + name: Optional[str] = None, +) -> asyncio.Task[T]: + """ + Thread-safe version of asyncio.create_task(). + + Creates a task in the current thread's event loop. 
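+
+    A minimal sketch (`poll_fills` is a hypothetical coroutine function):
+
+        task = thread_safe_create_task(poll_fills(), name="poll-fills")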
+ + Args: + coro: Coroutine to wrap in a task + name: Optional name for the task + + Returns: + The created task + """ + current_loop = get_current_loop() + + if name is not None: + return current_loop.create_task(coro, name=name) + else: + return current_loop.create_task(coro) + + +async def thread_safe_wait( + fs: Set[Awaitable[Any]], + *, + timeout: Optional[float] = None, + return_when: str = asyncio.ALL_COMPLETED, +) -> Tuple[Set[asyncio.Task[Any]], Set[asyncio.Task[Any]]]: + """ + Thread-safe version of asyncio.wait(). + + Args: + fs: Set of futures/coroutines to wait for + timeout: Maximum time to wait + return_when: When to return (ALL_COMPLETED, FIRST_COMPLETED, FIRST_EXCEPTION) + + Returns: + Tuple of (done, pending) task sets + """ + current_loop = asyncio.get_running_loop() + + # Ensure all futures are tasks in current loop + tasks = set() + for f in fs: + if asyncio.iscoroutine(f): + task = current_loop.create_task(f) + else: + task = f + tasks.add(task) + + return await asyncio.wait(tasks, timeout=timeout, return_when=return_when) diff --git a/extended/utils/constants.py b/extended/utils/constants.py new file mode 100644 index 0000000..6b61c1f --- /dev/null +++ b/extended/utils/constants.py @@ -0,0 +1,69 @@ +""" +Constants for Extended Exchange SDK. + +Provides mappings between Hyperliquid and Extended formats. +""" + +from x10.perpetual.orders import TimeInForce as X10TimeInForce + +# Time in force mapping (Hyperliquid -> Extended) +TIF_MAPPING = { + "Gtc": X10TimeInForce.GTT, + "Ioc": X10TimeInForce.IOC, + "Alo": X10TimeInForce.GTT, # ALO uses GTT with post_only=True +} + +# Side mapping (is_buy -> Extended OrderSide) +SIDE_MAPPING = { + True: "BUY", + False: "SELL", +} + +# Side to Hyperliquid format +SIDE_TO_HL = { + "BUY": "B", + "SELL": "A", + "LONG": "B", + "SHORT": "A", +} + +# Candle interval mapping (Hyperliquid -> Extended) +INTERVAL_MAPPING = { + "1m": "PT1M", + "5m": "PT5M", + "15m": "PT15M", + "30m": "PT30M", + "1h": "PT1H", + "2h": "PT2H", + "4h": "PT4H", + "1d": "P1D", +} + +# Reverse interval mapping (Extended -> Hyperliquid) +INTERVAL_MAPPING_REVERSE = {v: k for k, v in INTERVAL_MAPPING.items()} + +# Interval in milliseconds for close timestamp calculation +INTERVAL_MS = { + "1m": 60000, + "5m": 300000, + "15m": 900000, + "30m": 1800000, + "1h": 3600000, + "2h": 7200000, + "4h": 14400000, + "1d": 86400000, +} + +# Candle type options (Extended API path parameter) +CANDLE_TYPES = ["trades", "mark-prices", "index-prices"] +DEFAULT_CANDLE_TYPE = "trades" + +# Default order expiry (1 hour in milliseconds) +DEFAULT_ORDER_EXPIRY_MS = 3600000 + +# Default slippage for market orders +DEFAULT_SLIPPAGE = 0.05 # 5% + +# Price cap/floor for market orders (Extended constraint) +MARKET_ORDER_PRICE_CAP = 1.05 # Buy: mark * 1.05 +MARKET_ORDER_PRICE_FLOOR = 0.95 # Sell: mark * 0.95 diff --git a/extended/utils/helpers.py b/extended/utils/helpers.py new file mode 100644 index 0000000..560c4c4 --- /dev/null +++ b/extended/utils/helpers.py @@ -0,0 +1,283 @@ +""" +Helper utilities for Extended Exchange SDK. 
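+
+Usage (a minimal sketch):
+
+    normalize_market_name("BTC")            # "BTC-USD"
+    to_hyperliquid_market_name("BTC-USD")   # "BTC"
+    tif, post_only = parse_order_type({"limit": {"tif": "Alo"}})
+    # -> (X10TimeInForce.GTT, True)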
+""" + +import asyncio +import threading +from decimal import Decimal +from functools import wraps +from typing import Any, Callable, Coroutine, Dict, Optional, Tuple, TypeVar + +import nest_asyncio + +from x10.perpetual.orders import TimeInForce as X10TimeInForce + +from extended.utils.constants import TIF_MAPPING + +T = TypeVar("T") + +# Thread-local storage for event loops +_thread_local = threading.local() + + +def normalize_market_name(name: str) -> str: + """ + Normalize market name to Extended format. + + Hyperliquid uses: "BTC", "ETH" + Extended uses: "BTC-USD", "ETH-USD" + + Args: + name: Market name in either format + + Returns: + Market name in Extended format (e.g., "BTC-USD") + """ + if "-" not in name: + return f"{name}-USD" + return name + + +def to_hyperliquid_market_name(name: str) -> str: + """ + Convert Extended market name to Hyperliquid format. + + "BTC-USD" -> "BTC" + + Args: + name: Market name in Extended format + + Returns: + Market name in Hyperliquid format (e.g., "BTC") + """ + return name.replace("-USD", "") + + +def run_sync(coro_or_factory) -> T: + """ + Run an async coroutine synchronously with nuclear-level thread safety. + + This version uses adaptive isolation strategies to completely eliminate + "Future attached to different loop" errors in production environments. + + Strategies (in order of preference): + 1. Thread isolation for ThreadPoolExecutor contexts + 2. Standard approach for main thread contexts + 3. Process isolation as ultimate fallback + + Args: + coro_or_factory: Either a coroutine object or a callable that returns a coroutine + + Returns: + The result of the coroutine + """ + # Handle callable factories to prevent coroutine reuse + if callable(coro_or_factory): + coro = coro_or_factory() + else: + coro = coro_or_factory + # Quick check if we're in a ThreadPoolExecutor (production scenario) + current_thread = threading.current_thread() + is_threadpool = ("ThreadPoolExecutor" in current_thread.name or + "CrossEx" in current_thread.name or + "Worker" in current_thread.name) + + if is_threadpool: + # Production ThreadPoolExecutor context - use nuclear isolation + return _run_sync_thread_isolated(coro_or_factory) + + # Try standard approach first for non-ThreadPoolExecutor contexts + try: + # Check for running loop first (async context) + try: + running_loop = asyncio.get_running_loop() + # We're inside an async context - use nest_asyncio + nest_asyncio.apply(running_loop) + return running_loop.run_until_complete(coro) + except RuntimeError: + # No running loop - expected case for sync contexts + pass + + # Standard event loop approach + is_main_thread = threading.current_thread() is threading.main_thread() + + if not is_main_thread: + # Worker thread: use thread-local event loop + loop = getattr(_thread_local, "loop", None) + if loop is None or loop.is_closed(): + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + nest_asyncio.apply(loop) + _thread_local.loop = loop + else: + # Main thread: use standard approach + try: + loop = asyncio.get_event_loop() + if loop.is_closed(): + raise RuntimeError("closed") + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + nest_asyncio.apply(loop) + + return loop.run_until_complete(coro) + + except RuntimeError as e: + if "attached to a different loop" in str(e): + # This is the exact error we're trying to fix - use nuclear isolation + return _run_sync_thread_isolated(coro_or_factory) + else: + # Other RuntimeError - re-raise + raise + + +def 
_run_sync_thread_isolated(coro_or_factory) -> T: + """ + Nuclear option: Run coroutine in completely isolated thread. + + This creates a dedicated thread with its own event loop, + completely eliminating any possibility of loop conflicts. + """ + import concurrent.futures + + result = None + exception = None + + def isolated_runner(): + nonlocal result, exception + try: + # Create completely isolated loop in this thread + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + try: + # Handle callable factories to create fresh coroutine in isolated thread + if callable(coro_or_factory): + coro = coro_or_factory() + else: + coro = coro_or_factory + result = loop.run_until_complete(asyncio.wait_for(coro, timeout=25)) + except asyncio.TimeoutError: + exception = TimeoutError("Extended SDK operation timed out after 25 seconds") + except Exception as e: + exception = e + finally: + try: + loop.close() + except: + pass + + except Exception as e: + exception = e + + # Run in dedicated thread with timeout + with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(isolated_runner) + try: + future.result(timeout=30) # 30 second total timeout + except concurrent.futures.TimeoutError: + raise TimeoutError("Extended SDK operation timed out after 30 seconds") + + if exception: + raise exception + + return result + + +def sync_wrapper( + async_method: Callable[..., Coroutine[Any, Any, T]] +) -> Callable[..., T]: + """ + Decorator to create a synchronous version of an async method. + + Usage: + class AsyncInfoAPI: + async def user_state(self) -> Dict: + ... + + class InfoAPI: + def __init__(self, async_api: AsyncInfoAPI): + self._async = async_api + + @sync_wrapper + async def user_state(self) -> Dict: + return await self._async.user_state() + """ + + @wraps(async_method) + def wrapper(*args: Any, **kwargs: Any) -> T: + return run_sync(async_method(*args, **kwargs)) + + return wrapper + + +def parse_order_type(order_type: Dict[str, Any]) -> Tuple[X10TimeInForce, bool]: + """ + Parse Hyperliquid order_type to Extended params. + + Args: + order_type: Hyperliquid order type dict + {"limit": {"tif": "Gtc"}} - Good-till-cancel + {"limit": {"tif": "Ioc"}} - Immediate-or-cancel + {"limit": {"tif": "Alo"}} - Add-liquidity-only (post-only) + + Returns: + Tuple of (TimeInForce, post_only) + """ + if "limit" in order_type: + tif = order_type["limit"].get("tif", "Gtc") + post_only = tif == "Alo" + return TIF_MAPPING.get(tif, X10TimeInForce.GTT), post_only + return X10TimeInForce.GTT, False + + +def parse_builder( + builder: Optional[Dict[str, Any]] +) -> Tuple[Optional[int], Optional[Decimal]]: + """ + Parse Hyperliquid builder format to Extended params. 
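+
+    For example, parse_builder({"b": "123", "f": 10}) returns
+    (123, Decimal("0.0001")).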
+ + Hyperliquid: {"b": "123", "f": 10} + b = builder_id as string + f = fee in tenths of basis points + + Extended: builder_id (int), builder_fee (Decimal rate) + + Conversion: + f=1 -> 0.1 bps -> 0.000001 + f=10 -> 1 bps -> 0.0001 + f=50 -> 5 bps -> 0.0005 + + Args: + builder: Builder dict in Hyperliquid format or None + + Returns: + Tuple of (builder_id, builder_fee) or (None, None) + """ + if builder is None: + return None, None + + # Parse builder_id from string to int + builder_id = int(builder["b"]) + + # Convert tenths of bps to decimal rate + # f / 100000 = decimal rate + fee_tenths_bps = builder.get("f", 0) + builder_fee = Decimal(fee_tenths_bps) / Decimal(100000) + + return builder_id, builder_fee + + +def calculate_sz_decimals(min_order_size_change: Decimal) -> int: + """ + Calculate size decimals from minimum order size change. + + Args: + min_order_size_change: Minimum order size change (e.g., 0.001) + + Returns: + Number of decimal places (e.g., 3) + """ + if min_order_size_change <= 0: + return 0 + return abs(int(min_order_size_change.log10())) diff --git a/poetry.lock b/poetry.lock index 361f814..95fdfac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -700,6 +700,9 @@ groups = ["main"] files = [ {file = "ckzg-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49ee4c830de89764bfd9e8188446f3020f14d32bd4486fcbc5a4a5afad775ac0"}, {file = "ckzg-2.1.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3b4f0c6c2f1a629d4d64e900c65633595c63d208001d588c61b6c8bc1b189dec"}, + {file = "ckzg-2.1.5-cp310-cp310-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:10c8bc524267a40fe7c4fabd4c23f131ea18fcabd6016cdc4ddcb95cc757faf5"}, + {file = "ckzg-2.1.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8ea589e60db460ee9ebb678f20e74cc9289e912ccad66693b3263459933aaffc"}, + {file = "ckzg-2.1.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97769b53f7d8c46e794d5c8aa609a4c00ec1fb050e69b6833b45dbb23a7b6501"}, {file = "ckzg-2.1.5-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a45aaea4a42babea48bb27e387fb209f2aaaaaa16abea25a4a92a056b616f9af"}, {file = "ckzg-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:060562273057911c39a1491e9b76055c095c10cfff1704ed70011e38b53f83d8"}, {file = "ckzg-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12a90277b17e1cb5c326c5c261dad2ebb14a7136e754593e3a0a92c94799fc1"}, @@ -709,6 +712,9 @@ files = [ {file = "ckzg-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:2b7ef12896e2afff613f058e3bc8e3478ff626ae8a6f2d3200950304a536935f"}, {file = "ckzg-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cead4ba760a49eaa4d7a50a0483aad9727d6103fc00c408aef15f2cd8f8dec7b"}, {file = "ckzg-2.1.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3156983ba598fa05f0136325125e75197e4cf24ded255aaa6ace068cede92932"}, + {file = "ckzg-2.1.5-cp311-cp311-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:cac601a9690f133dd9d8e85f7a96578496427d42cdea771e0e07785b1cbbe9dc"}, + {file = "ckzg-2.1.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:05860f1477880376106a6934becdcb3a2c6330fc2386fed0d7e8f3b0ce5df81c"}, + {file = "ckzg-2.1.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:92b18b0ec177b9e2b4238936a8bffcfdaee7626a58f8d0c7c2ac554b8a05c9b6"}, {file = "ckzg-2.1.5-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d05e2c9466b2a4214dc19da35ea4cae636e033f3434768b982d37317a0f9c520"}, {file = "ckzg-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c754bbc253cfce8814d633f135be4891e6f83a50125f418fee01323ba306f59a"}, {file = "ckzg-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2b766d4aed52c8c717322f2af935da0b916bf59fbba771adb822499b45e491"}, @@ -718,6 +724,9 @@ files = [ {file = "ckzg-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:ce2047071353ee099d44aa6575974648663204eb9b42354bfa5ac6f9b8fb63e9"}, {file = "ckzg-2.1.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:edead535bd9afef27b8650bba09659debd4f52638aee5ec1ab7d2c9d7e86953c"}, {file = "ckzg-2.1.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dc78622855de3d47767cdeecfdf58fd58911f43a0fa783524e414b7e75149020"}, + {file = "ckzg-2.1.5-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:e5639064b0dd147b73f2ce2c2506844b0c625b232396ac852dc52eced04bd529"}, + {file = "ckzg-2.1.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0864813902b96cde171e65334ce8d13c5ff5b6855f2e71a2272ae268fa07e8"}, + {file = "ckzg-2.1.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e6f13f673a24c01e681eb66aed8f8e4ce191f009dd2149f3e1b9ad0dd59b4cd"}, {file = "ckzg-2.1.5-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094add5f197a3d278924ec1480d258f3b8b0e9f8851ae409eec83a21a738bffe"}, {file = "ckzg-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b4b05f798784400e8c4dedaf1a1d57bbbc54de790855855add876fff3c9f629"}, {file = "ckzg-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64aef50a1cf599041b9af018bc885a3fad6a20bbaf443fc45f0457cb47914610"}, @@ -727,6 +736,9 @@ files = [ {file = "ckzg-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:827be2aeffc8a10bfb39b8dad45def82164dfcde735818c4053f5064474ae1b4"}, {file = "ckzg-2.1.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d955f4e18bb9a9b3a6f55114052edd41650c29edd5f81e417c8f01abace8207"}, {file = "ckzg-2.1.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0c0961a685761196264aa49b1cf06e8a2b2add4d57987853d7dd7a7240dc5de7"}, + {file = "ckzg-2.1.5-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:026ef3bba0637032c21f6bdb8e92aefeae7c67003bf631a4ee80c515a36a9dbd"}, + {file = "ckzg-2.1.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf031139a86e4ff00a717f9539331ef148ae9013b58848f2a7ac14596d812915"}, + {file = "ckzg-2.1.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f51339d58541ae450c78a509b32822eec643595d8b96949fb1963fba802dc78b"}, {file = "ckzg-2.1.5-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:badb1c7dc6b932bed2c3f7695e1ce3e4bcc9601706136957408ac2bde5dd0892"}, {file = "ckzg-2.1.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58d92816b9babaee87bd9f23be10c07d5d07c709be184aa7ea08ddb2bcf2541c"}, {file = 
"ckzg-2.1.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cf39f9abe8b3f1a71188fb601a8589672ee40eb0671fc36d8cdf4e78f00f43f"}, @@ -734,6 +746,24 @@ files = [ {file = "ckzg-2.1.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c39a1c7b32ac345cc44046076fd069ad6b7e6f7bef230ef9be414c712c4453b8"}, {file = "ckzg-2.1.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4564765b0cc65929eca057241b9c030afac1dbae015f129cb60ca6abd6ff620"}, {file = "ckzg-2.1.5-cp313-cp313-win_amd64.whl", hash = "sha256:55013b36514b8176197655b929bc53f020aa51a144331720dead2efc3793ed85"}, + {file = "ckzg-2.1.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a0cab7deaed093898a92d3644d4ca8621b63cb49296833e2d8b3edac456656d5"}, + {file = "ckzg-2.1.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:caedc9eba3d28584be9b6051585f20745f6abfec0d0657cce3dd45edb7f28586"}, + {file = "ckzg-2.1.5-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:2f67e545d41ba960189b1011d078953311259674620c485e619c933494b88fd9"}, + {file = "ckzg-2.1.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6f65ff296033c259d0829093d2c55bb45651e001e0269b8b88d072fdc86ecc6"}, + {file = "ckzg-2.1.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d66d34ff33be94c8a1f0da86483cd5bfdc15842998f3654ed91b8fdbffa2a81"}, + {file = "ckzg-2.1.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:25cf954bae3e2b2db6fa5e811d9800f89199d3eb4fa906c96a1c03434d4893c9"}, + {file = "ckzg-2.1.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:34d7128735e0bcfcac876bff47d0f85e674f1e24f99014e326ec266abed7a82c"}, + {file = "ckzg-2.1.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1dec3efae8679f7b8e26263b8bb0d3061ef4c9c6fe395e55b71f8f0df90ca8a0"}, + {file = "ckzg-2.1.5-cp314-cp314-win_amd64.whl", hash = "sha256:ce37c0ee0effe55d4ceed1735a2d85a3556a86238f3c89b7b7d1ca4ce4e92358"}, + {file = "ckzg-2.1.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:db804d27f4b08e3aea440cdc6558af4ceb8256b18ea2b83681d80cc654a4085b"}, + {file = "ckzg-2.1.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d472e3beeb95a110275b4d27e51d1c2b26ab99ddb91ac1c5587d710080c39c5e"}, + {file = "ckzg-2.1.5-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:4b44a018124a79138fab8fde25221083574c181c324519be51eab09b1e43ae27"}, + {file = "ckzg-2.1.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6a91d7b444300cf8ecae4f55983726630530cdde15cab92023026230a30d094e"}, + {file = "ckzg-2.1.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8674c64efbf2a12edf6d776061847bbe182997737e7690a69af932ce61a9c2a"}, + {file = "ckzg-2.1.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4290aa17c6402c98f16017fd6ee0bff8aeb5c97be5c3cee7c72aea1b7d176f3a"}, + {file = "ckzg-2.1.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a0f82b8958ea97df12e29094f0a672cbe7532399724ea61b2399545991ed6017"}, + {file = "ckzg-2.1.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:22300bf0d717a083c388de5cfafec08443c9938b3abde2e89f9d5d1fffde1c51"}, + {file = "ckzg-2.1.5-cp314-cp314t-win_amd64.whl", hash = "sha256:aa8228206c3e3729fc117ca38e27588c079b0928a5ab628ee4d9fccaa2b8467d"}, {file = "ckzg-2.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:44d585f756ab223e34ac80ae04be7969cb364ee250a91f9b2b1dae37e1f3020a"}, {file = "ckzg-2.1.5-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecade6a3aee63dffc8e8d4adba838460b40f9b29d46ffd9f4d4502261fbcddff"}, {file = "ckzg-2.1.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8548de14e6e53271b246c7dc0bf843030b7f2144edb9ea73c68f46174a2bacd6"}, @@ -752,6 +782,9 @@ files = [ {file = "ckzg-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d2fed86e47399b06b564c8d3715a3ccec5d3a0a63326227a34e15515b8c514db"}, {file = "ckzg-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b1b52d359013b551b85fff538d2ef12763abd87efbc544d6f2808b9dd6bf0a4b"}, {file = "ckzg-2.1.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4cfe1cacea729c06196dcecec9c38f9b59bb7eadce51145e7ee27de10854dd59"}, + {file = "ckzg-2.1.5-cp38-cp38-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:86233ccbb0bcaf353990ce2a8e24f1aa37782272e64ca9b55dd45895829e4980"}, + {file = "ckzg-2.1.5-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3449126ee416b438c22cd7b7620e8f030c9ba7e030a80ebbd5924f04bc95905"}, + {file = "ckzg-2.1.5-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a687c0609de11d4eba5a982036fd77d21d35841effb468a41004c68ad13a7438"}, {file = "ckzg-2.1.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d22cef6551dee8d05151cc5184c37b190101b2027c0851301393561c559c669"}, {file = "ckzg-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2867e4a49f19248644206e82f5b8795e22096722dcd1e21acdad133e87632d5c"}, {file = "ckzg-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e7fd0c4b4d2af5661e3d54648c0447d33f17cbafa5dd1b0576899864b5b7da"}, @@ -761,6 +794,9 @@ files = [ {file = "ckzg-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:15e0f7342a451569fa427c6ad3cb992975462c52c3ecdc2bd7c3ed35847bbb8b"}, {file = "ckzg-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3773ccdb3501ff3779988aa97e5b15629d58ac02281f186030f66d2fc2b4b7ec"}, {file = "ckzg-2.1.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2c9b798c6eb4db9cf82272e5a5c62be86f0d435206c6c49cc078cbb67ebd51bd"}, + {file = "ckzg-2.1.5-cp39-cp39-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:13c0630363a65182e99d064f7eb173195dcbdddc4048fd5b45cd0a3cd0c740f9"}, + {file = "ckzg-2.1.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:165efe1fac474ae58a26b742f910c0c90c01fc356aac8b680db2e02e44005adf"}, + {file = "ckzg-2.1.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fa231a1965be1a9c6fa50528132b71f1bc486335564baf6ab6d98aebedfb03d7"}, {file = "ckzg-2.1.5-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ea32c21f71b786ea04b62cbe982b600da5e6f180b1d256fc9e397074041a6d"}, {file = "ckzg-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10455cc15e769a749c19fd3031dd0149eb92c2f9b4a054117cb20327242fd920"}, {file = "ckzg-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b21c38740aa5fcdc0cacfe9eda82cbf7bdffc743fa85344495bfecc18619d7d6"}, @@ -2138,6 +2174,18 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = 
"sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + [[package]] name = "nltk" version = "3.9.2" @@ -2238,134 +2286,134 @@ testing = ["coverage", "pytest", "pytest-benchmark"] [[package]] name = "propcache" -version = "0.4.0" +version = "0.4.1" description = "Accelerated property cache" optional = false python-versions = ">=3.9" groups = ["main", "dev"] files = [ - {file = "propcache-0.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:779aaae64089e2f4992e993faea801925395d26bb5de4a47df7ef7f942c14f80"}, - {file = "propcache-0.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566552ed9b003030745e5bc7b402b83cf3cecae1bade95262d78543741786db5"}, - {file = "propcache-0.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:944de70384c62d16d4a00c686b422aa75efbc67c4addaebefbb56475d1c16034"}, - {file = "propcache-0.4.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e878553543ece1f8006d0ba4d096b40290580db173bfb18e16158045b9371335"}, - {file = "propcache-0.4.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8659f995b19185179474b18de8755689e1f71e1334d05c14e1895caa4e409cf7"}, - {file = "propcache-0.4.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7aa8cc5c94e682dce91cb4d12d7b81c01641f4ef5b3b3dc53325d43f0e3b9f2e"}, - {file = "propcache-0.4.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da584d917a1a17f690fc726617fd2c3f3006ea959dae5bb07a5630f7b16f9f5f"}, - {file = "propcache-0.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:892a072e5b19c3f324a4f8543c9f7e8fc2b0aa08579e46f69bdf0cfc1b440454"}, - {file = "propcache-0.4.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c20d796210720455086ef3f85adc413d1e41d374742f9b439354f122bbc3b528"}, - {file = "propcache-0.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df7107a91126a495880576610ae989f19106e1900dd5218d08498391fa43b31d"}, - {file = "propcache-0.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0b04ac2120c161416c866d0b6a4259e47e92231ff166b518cc0efb95777367c3"}, - {file = "propcache-0.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1e7fa29c71ffa8d6a37324258737d09475f84715a6e8c350f67f0bc8e5e44993"}, - {file = "propcache-0.4.0-cp310-cp310-win32.whl", hash = "sha256:01c0ebc172ca28e9d62876832befbf7f36080eee6ed9c9e00243de2a8089ad57"}, - {file = "propcache-0.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:84f847e64f4d1a232e50460eebc1196642ee9b4c983612f41cd2d44fd2fe7c71"}, - {file = "propcache-0.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:2166466a666a5bebc332cd209cad77d996fad925ca7e8a2a6310ba9e851ae641"}, - {file = "propcache-0.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6a6a36b94c09711d6397d79006ca47901539fbc602c853d794c39abd6a326549"}, - {file = "propcache-0.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da47070e1340a1639aca6b1c18fe1f1f3d8d64d3a1f9ddc67b94475f44cd40f3"}, - {file = 
"propcache-0.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de536cf796abc5b58d11c0ad56580215d231d9554ea4bb6b8b1b3bed80aa3234"}, - {file = "propcache-0.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f5c82af8e329c3cdc3e717dd3c7b2ff1a218b6de611f6ce76ee34967570a9de9"}, - {file = "propcache-0.4.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:abe04e7aa5ab2e4056fcf3255ebee2071e4a427681f76d4729519e292c46ecc1"}, - {file = "propcache-0.4.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:075ca32384294434344760fdcb95f7833e1d7cf7c4e55f0e726358140179da35"}, - {file = "propcache-0.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:626ec13592928b677f48ff5861040b604b635e93d8e2162fb638397ea83d07e8"}, - {file = "propcache-0.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:02e071548b6a376e173b0102c3f55dc16e7d055b5307d487e844c320e38cacf2"}, - {file = "propcache-0.4.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:2af6de831a26f42a3f94592964becd8d7f238551786d7525807f02e53defbd13"}, - {file = "propcache-0.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:bd6c6dba1a3b8949e08c4280071c86e38cb602f02e0ed6659234108c7a7cd710"}, - {file = "propcache-0.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:783e91595cf9b66c2deda17f2e8748ae8591aa9f7c65dcab038872bfe83c5bb1"}, - {file = "propcache-0.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c3f4b125285d354a627eb37f3ea7c13b8842c7c0d47783581d0df0e272dbf5f0"}, - {file = "propcache-0.4.0-cp311-cp311-win32.whl", hash = "sha256:71c45f02ffbb8a21040ae816ceff7f6cd749ffac29fc0f9daa42dc1a9652d577"}, - {file = "propcache-0.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:7d51f70f77950f8efafed4383865d3533eeee52d8a0dd1c35b65f24de41de4e0"}, - {file = "propcache-0.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:858eaabd2191dd0da5272993ad08a748b5d3ae1aefabea8aee619b45c2af4a64"}, - {file = "propcache-0.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:381c84a445efb8c9168f1393a5a7c566de22edc42bfe207a142fff919b37f5d9"}, - {file = "propcache-0.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5a531d29d7b873b12730972237c48b1a4e5980b98cf21b3f09fa4710abd3a8c3"}, - {file = "propcache-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd6e22255ed73efeaaeb1765505a66a48a9ec9ebc919fce5ad490fe5e33b1555"}, - {file = "propcache-0.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9a8d277dc218ddf04ec243a53ac309b1afcebe297c0526a8f82320139b56289"}, - {file = "propcache-0.4.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:399c73201d88c856a994916200d7cba41d7687096f8eb5139eb68f02785dc3f7"}, - {file = "propcache-0.4.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a1d5e474d43c238035b74ecf997f655afa67f979bae591ac838bb3fbe3076392"}, - {file = "propcache-0.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f589652ee38de96aa58dd219335604e09666092bc250c1d9c26a55bcef9932"}, - {file = "propcache-0.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5227da556b2939da6125cda1d5eecf9e412e58bc97b41e2f192605c3ccbb7c2"}, - {file = "propcache-0.4.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = 
"sha256:92bc43a1ab852310721ce856f40a3a352254aa6f5e26f0fad870b31be45bba2e"}, - {file = "propcache-0.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:83ae2f5343f6f06f4c91ae530d95f56b415f768f9c401a5ee2a10459cf74370b"}, - {file = "propcache-0.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:077a32977399dc05299b16e793210341a0b511eb0a86d1796873e83ce47334cc"}, - {file = "propcache-0.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:94a278c45e6463031b5a8278e40a07edf2bcc3b5379510e22b6c1a6e6498c194"}, - {file = "propcache-0.4.0-cp312-cp312-win32.whl", hash = "sha256:4c491462e1dc80f9deb93f428aad8d83bb286de212837f58eb48e75606e7726c"}, - {file = "propcache-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cdb0cecafb528ab15ed89cdfed183074d15912d046d3e304955513b50a34b907"}, - {file = "propcache-0.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:b2f29697d1110e8cdf7a39cc630498df0082d7898b79b731c1c863f77c6e8cfc"}, - {file = "propcache-0.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e2d01fd53e89cb3d71d20b8c225a8c70d84660f2d223afc7ed7851a4086afe6d"}, - {file = "propcache-0.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7dfa60953169d2531dd8ae306e9c27c5d4e5efe7a2ba77049e8afdaece062937"}, - {file = "propcache-0.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:227892597953611fce2601d49f1d1f39786a6aebc2f253c2de775407f725a3f6"}, - {file = "propcache-0.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e0a5bc019014531308fb67d86066d235daa7551baf2e00e1ea7b00531f6ea85"}, - {file = "propcache-0.4.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6ebc6e2e65c31356310ddb6519420eaa6bb8c30fbd809d0919129c89dcd70f4c"}, - {file = "propcache-0.4.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1927b78dd75fc31a7fdc76cc7039e39f3170cb1d0d9a271e60f0566ecb25211a"}, - {file = "propcache-0.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b113feeda47f908562d9a6d0e05798ad2f83d4473c0777dafa2bc7756473218"}, - {file = "propcache-0.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4596c12aa7e3bb2abf158ea8f79eb0fb4851606695d04ab846b2bb386f5690a1"}, - {file = "propcache-0.4.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6d1f67dad8cc36e8abc2207a77f3f952ac80be7404177830a7af4635a34cbc16"}, - {file = "propcache-0.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6229ad15366cd8b6d6b4185c55dd48debf9ca546f91416ba2e5921ad6e210a6"}, - {file = "propcache-0.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2a4bf309d057327f1f227a22ac6baf34a66f9af75e08c613e47c4d775b06d6c7"}, - {file = "propcache-0.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c2e274f3d1cbb2ddcc7a55ce3739af0f8510edc68a7f37981b2258fa1eedc833"}, - {file = "propcache-0.4.0-cp313-cp313-win32.whl", hash = "sha256:f114a3e1f8034e2957d34043b7a317a8a05d97dfe8fddb36d9a2252c0117dbbc"}, - {file = "propcache-0.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ba68c57cde9c667f6b65b98bc342dfa7240b1272ffb2c24b32172ee61b6d281"}, - {file = "propcache-0.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb77a85253174bf73e52c968b689d64be62d71e8ac33cabef4ca77b03fb4ef92"}, - {file = "propcache-0.4.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c0e1c218fff95a66ad9f2f83ad41a67cf4d0a3f527efe820f57bde5fda616de4"}, - {file = "propcache-0.4.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:5710b1c01472542bb024366803812ca13e8774d21381bcfc1f7ae738eeb38acc"}, - {file = "propcache-0.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d7f008799682e8826ce98f25e8bc43532d2cd26c187a1462499fa8d123ae054f"}, - {file = "propcache-0.4.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0596d2ae99d74ca436553eb9ce11fe4163dc742fcf8724ebe07d7cb0db679bb1"}, - {file = "propcache-0.4.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ab9c1bd95ebd1689f0e24f2946c495808777e9e8df7bb3c1dfe3e9eb7f47fe0d"}, - {file = "propcache-0.4.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a8ef2ea819549ae2e8698d2ec229ae948d7272feea1cb2878289f767b6c585a4"}, - {file = "propcache-0.4.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:71a400b2f0b079438cc24f9a27f02eff24d8ef78f2943f949abc518b844ade3d"}, - {file = "propcache-0.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c2735d3305e6cecab6e53546909edf407ad3da5b9eeaf483f4cf80142bb21be"}, - {file = "propcache-0.4.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:72b51340047ac43b3cf388eebd362d052632260c9f73a50882edbb66e589fd44"}, - {file = "propcache-0.4.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:184c779363740d6664982ad05699f378f7694220e2041996f12b7c2a4acdcad0"}, - {file = "propcache-0.4.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a60634a9de41f363923c6adfb83105d39e49f7a3058511563ed3de6748661af6"}, - {file = "propcache-0.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8119244d122241a9c4566bce49bb20408a6827044155856735cf14189a7da"}, - {file = "propcache-0.4.0-cp313-cp313t-win32.whl", hash = "sha256:515b610a364c8cdd2b72c734cc97dece85c416892ea8d5c305624ac8734e81db"}, - {file = "propcache-0.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7ea86eb32e74f9902df57e8608e8ac66f1e1e1d24d1ed2ddeb849888413b924d"}, - {file = "propcache-0.4.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c1443fa4bb306461a3a8a52b7de0932a2515b100ecb0ebc630cc3f87d451e0a9"}, - {file = "propcache-0.4.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:de8e310d24b5a61de08812dd70d5234da1458d41b059038ee7895a9e4c8cae79"}, - {file = "propcache-0.4.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:55a54de5266bc44aa274915cdf388584fa052db8748a869e5500ab5993bac3f4"}, - {file = "propcache-0.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:88d50d662c917ec2c9d3858920aa7b9d5bfb74ab9c51424b775ccbe683cb1b4e"}, - {file = "propcache-0.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ae3adf88a66f5863cf79394bc359da523bb27a2ed6ba9898525a6a02b723bfc5"}, - {file = "propcache-0.4.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7f088e21d15b3abdb9047e4b7b7a0acd79bf166893ac2b34a72ab1062feb219e"}, - {file = "propcache-0.4.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a4efbaf10793fd574c76a5732c75452f19d93df6e0f758c67dd60552ebd8614b"}, - {file = "propcache-0.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:681a168d06284602d56e97f09978057aa88bcc4177352b875b3d781df4efd4cb"}, - {file = "propcache-0.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a7f06f077fc4ef37e8a37ca6bbb491b29e29db9fb28e29cf3896aad10dbd4137"}, - {file = 
"propcache-0.4.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:082a643479f49a6778dcd68a80262fc324b14fd8e9b1a5380331fe41adde1738"}, - {file = "propcache-0.4.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:26692850120241a99bb4a4eec675cd7b4fdc431144f0d15ef69f7f8599f6165f"}, - {file = "propcache-0.4.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:33ad7d37b9a386f97582f5d042cc7b8d4b3591bb384cf50866b749a17e4dba90"}, - {file = "propcache-0.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1e7fd82d4a5b7583588f103b0771e43948532f1292105f13ee6f3b300933c4ca"}, - {file = "propcache-0.4.0-cp314-cp314-win32.whl", hash = "sha256:213eb0d3bc695a70cffffe11a1c2e1c2698d89ffd8dba35a49bc44a035d45c93"}, - {file = "propcache-0.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:087e2d3d7613e1b59b2ffca0daabd500c1a032d189c65625ee05ea114afcad0b"}, - {file = "propcache-0.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:94b0f7407d18001dbdcbb239512e753b1b36725a6e08a4983be1c948f5435f79"}, - {file = "propcache-0.4.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b730048ae8b875e2c0af1a09ca31b303fc7b5ed27652beec03fa22b29545aec9"}, - {file = "propcache-0.4.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f495007ada16a4e16312b502636fafff42a9003adf1d4fb7541e0a0870bc056f"}, - {file = "propcache-0.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:659a0ea6d9017558ed7af00fb4028186f64d0ba9adfc70a4d2c85fcd3d026321"}, - {file = "propcache-0.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d74aa60b1ec076d4d5dcde27c9a535fc0ebb12613f599681c438ca3daa68acac"}, - {file = "propcache-0.4.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34000e31795bdcda9826e0e70e783847a42e3dcd0d6416c5d3cb717905ebaec0"}, - {file = "propcache-0.4.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bcb5bfac5b9635e6fc520c8af6efc7a0a56f12a1fe9e9d3eb4328537e316dd6a"}, - {file = "propcache-0.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ea11fceb31fa95b0fa2007037f19e922e2caceb7dc6c6cac4cb56e2d291f1a2"}, - {file = "propcache-0.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:cd8684f628fe285ea5c86f88e1c30716239dc9d6ac55e7851a4b7f555b628da3"}, - {file = "propcache-0.4.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:790286d3d542c0ef9f6d0280d1049378e5e776dcba780d169298f664c39394db"}, - {file = "propcache-0.4.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:009093c9b5dbae114a5958e6a649f8a5d94dd6866b0f82b60395eb92c58002d4"}, - {file = "propcache-0.4.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:728d98179e92d77096937fdfecd2c555a3d613abe56c9909165c24196a3b5012"}, - {file = "propcache-0.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a9725d96a81e17e48a0fe82d0c3de2f5e623d7163fec70a6c7df90753edd1bec"}, - {file = "propcache-0.4.0-cp314-cp314t-win32.whl", hash = "sha256:0964c55c95625193defeb4fd85f8f28a9a754ed012cab71127d10e3dc66b1373"}, - {file = "propcache-0.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:24403152e41abf09488d3ae9c0c3bf7ff93e2fb12b435390718f21810353db28"}, - {file = "propcache-0.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0363a696a9f24b37a04ed5e34c2e07ccbe92798c998d37729551120a1bb744c4"}, - {file = "propcache-0.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0cd30341142c68377cf3c4e2d9f0581e6e528694b2d57c62c786be441053d2fc"}, - {file = 
"propcache-0.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c46d37955820dd883cf9156ceb7825b8903e910bdd869902e20a5ac4ecd2c8b"}, - {file = "propcache-0.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0b12df77eb19266efd146627a65b8ad414f9d15672d253699a50c8205661a820"}, - {file = "propcache-0.4.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cdabd60e109506462e6a7b37008e57979e737dc6e7dfbe1437adcfe354d1a0a"}, - {file = "propcache-0.4.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:65ff56a31f25925ef030b494fe63289bf07ef0febe6da181b8219146c590e185"}, - {file = "propcache-0.4.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:96153e037ae065bb71cae889f23c933190d81ae183f3696a030b47352fd8655d"}, - {file = "propcache-0.4.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bf95be277fbb51513895c2cecc81ab12a421cdbd8837f159828a919a0167f96"}, - {file = "propcache-0.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8d18d796ffecdc8253742fd53a94ceee2e77ad149eb9ed5960c2856b5f692f71"}, - {file = "propcache-0.4.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4a52c25a51d5894ba60c567b0dbcf73de2f3cd642cf5343679e07ca3a768b085"}, - {file = "propcache-0.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:e0ce7f3d1faf7ad58652ed758cc9753049af5308b38f89948aa71793282419c5"}, - {file = "propcache-0.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:545987971b2aded25ba4698135ea0ae128836e7deb6e18c29a581076aaef44aa"}, - {file = "propcache-0.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7da5c4c72ae40fd3ce87213ab057db66df53e55600d0b9e72e2b7f5a470a2cc4"}, - {file = "propcache-0.4.0-cp39-cp39-win32.whl", hash = "sha256:2015218812ee8f13bbaebc9f52b1e424cc130b68d4857bef018e65e3834e1c4d"}, - {file = "propcache-0.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:39f0f6a3b56e82dc91d84c763b783c5c33720a33c70ee48a1c13ba800ac1fa69"}, - {file = "propcache-0.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:236c8da353ea7c22a8e963ab78cddb1126f700ae9538e2c4c6ef471e5545494b"}, - {file = "propcache-0.4.0-py3-none-any.whl", hash = "sha256:015b2ca2f98ea9e08ac06eecc409d5d988f78c5fd5821b2ad42bc9afcd6b1557"}, - {file = "propcache-0.4.0.tar.gz", hash = "sha256:c1ad731253eb738f9cadd9fa1844e019576c70bca6a534252e97cf33a57da529"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"}, + {file = 
"propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"}, + {file = "propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"}, + {file = "propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"}, + {file = "propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"}, + {file = "propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"}, + {file = "propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"}, + {file = "propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"}, + {file = "propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"}, + {file = "propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"}, + {file = "propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"}, + {file = "propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"}, + {file = "propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"}, + {file = "propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"}, + {file = "propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"}, + {file = 
"propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"}, + {file = "propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"}, + {file = "propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"}, + {file = "propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"}, + {file = "propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"}, + {file = 
"propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"}, + {file = "propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"}, + {file = "propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"}, + {file = "propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183"}, + {file = "propcache-0.4.1-cp39-cp39-win32.whl", hash = 
"sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19"}, + {file = "propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f"}, + {file = "propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938"}, + {file = "propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"}, + {file = "propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"}, ] [[package]] @@ -3176,6 +3224,8 @@ files = [ {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-win32.whl", hash = "sha256:6d5472f63a31b042aadf5ed28dd3ef0523da49ac17f0463e10fda9c4a2773352"}, {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-win_amd64.whl", hash = "sha256:8dd3c2cc49caa7a8d64b67146462aed6723a0495e44bf0aa0a2e94beaa8432f6"}, {file = "ruamel.yaml.clib-0.2.14.tar.gz", hash = "sha256:803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e"}, + {file = "ruamel_yaml_clib-0.2.14-cp314-cp314-win32.whl", hash = "sha256:9b4104bf43ca0cd4e6f738cb86326a3b2f6eef00f417bd1e7efb7bdffe74c539"}, + {file = "ruamel_yaml_clib-0.2.14-cp314-cp314-win_amd64.whl", hash = "sha256:13997d7d354a9890ea1ec5937a219817464e5cc344805b37671562a401ca3008"}, ] [[package]] @@ -3540,98 +3590,81 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "websockets" -version = "13.1" +version = "10.4" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" groups = ["main"] files = [ - {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, - {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, - {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, - {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, - {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, - {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, - {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, - {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, - {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, - {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, - {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, - {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, - {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, - {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, - {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, - {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, - {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, - {file 
= "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, - {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, - {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, - {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, - {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, - {file = 
"websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, - {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, - {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, - {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, + {file = "websockets-10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d58804e996d7d2307173d56c297cf7bc132c52df27a3efaac5e8d43e36c21c48"}, + {file = "websockets-10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc0b82d728fe21a0d03e65f81980abbbcb13b5387f733a1a870672c5be26edab"}, + {file = "websockets-10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba089c499e1f4155d2a3c2a05d2878a3428cf321c848f2b5a45ce55f0d7d310c"}, + {file = "websockets-10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33d69ca7612f0ddff3316b0c7b33ca180d464ecac2d115805c044bf0a3b0d032"}, + {file = "websockets-10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62e627f6b6d4aed919a2052efc408da7a545c606268d5ab5bfab4432734b82b4"}, + {file = "websockets-10.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ea7b82bfcae927eeffc55d2ffa31665dc7fec7b8dc654506b8e5a518eb4d50"}, + {file = "websockets-10.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e0cb5cc6ece6ffa75baccfd5c02cffe776f3f5c8bf486811f9d3ea3453676ce8"}, + {file = "websockets-10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae5e95cfb53ab1da62185e23b3130e11d64431179debac6dc3c6acf08760e9b1"}, + {file = "websockets-10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7c584f366f46ba667cfa66020344886cf47088e79c9b9d39c84ce9ea98aaa331"}, + {file = "websockets-10.4-cp310-cp310-win32.whl", hash = "sha256:b029fb2032ae4724d8ae8d4f6b363f2cc39e4c7b12454df8df7f0f563ed3e61a"}, + {file = "websockets-10.4-cp310-cp310-win_amd64.whl", hash = "sha256:8dc96f64ae43dde92530775e9cb169979f414dcf5cff670455d81a6823b42089"}, + {file = "websockets-10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47a2964021f2110116cc1125b3e6d87ab5ad16dea161949e7244ec583b905bb4"}, + {file = "websockets-10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e789376b52c295c4946403bd0efecf27ab98f05319df4583d3c48e43c7342c2f"}, + {file = "websockets-10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:7d3f0b61c45c3fa9a349cf484962c559a8a1d80dae6977276df8fd1fa5e3cb8c"}, + {file = "websockets-10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55b5905705725af31ccef50e55391621532cd64fbf0bc6f4bac935f0fccec46"}, + {file = "websockets-10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00c870522cdb69cd625b93f002961ffb0c095394f06ba8c48f17eef7c1541f96"}, + {file = "websockets-10.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f38706e0b15d3c20ef6259fd4bc1700cd133b06c3c1bb108ffe3f8947be15fa"}, + {file = "websockets-10.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f2c38d588887a609191d30e902df2a32711f708abfd85d318ca9b367258cfd0c"}, + {file = "websockets-10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe10ddc59b304cb19a1bdf5bd0a7719cbbc9fbdd57ac80ed436b709fcf889106"}, + {file = "websockets-10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90fcf8929836d4a0e964d799a58823547df5a5e9afa83081761630553be731f9"}, + {file = "websockets-10.4-cp311-cp311-win32.whl", hash = "sha256:b9968694c5f467bf67ef97ae7ad4d56d14be2751000c1207d31bf3bb8860bae8"}, + {file = "websockets-10.4-cp311-cp311-win_amd64.whl", hash = "sha256:a7a240d7a74bf8d5cb3bfe6be7f21697a28ec4b1a437607bae08ac7acf5b4882"}, + {file = "websockets-10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74de2b894b47f1d21cbd0b37a5e2b2392ad95d17ae983e64727e18eb281fe7cb"}, + {file = "websockets-10.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a686ecb4aa0d64ae60c9c9f1a7d5d46cab9bfb5d91a2d303d00e2cd4c4c5cc"}, + {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d15c968ea7a65211e084f523151dbf8ae44634de03c801b8bd070b74e85033"}, + {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00213676a2e46b6ebf6045bc11d0f529d9120baa6f58d122b4021ad92adabd41"}, + {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e23173580d740bf8822fd0379e4bf30aa1d5a92a4f252d34e893070c081050df"}, + {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:dd500e0a5e11969cdd3320935ca2ff1e936f2358f9c2e61f100a1660933320ea"}, + {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4239b6027e3d66a89446908ff3027d2737afc1a375f8fd3eea630a4842ec9a0c"}, + {file = "websockets-10.4-cp37-cp37m-win32.whl", hash = "sha256:8a5cc00546e0a701da4639aa0bbcb0ae2bb678c87f46da01ac2d789e1f2d2038"}, + {file = "websockets-10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a9f9a735deaf9a0cadc2d8c50d1a5bcdbae8b6e539c6e08237bc4082d7c13f28"}, + {file = "websockets-10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c1289596042fad2cdceb05e1ebf7aadf9995c928e0da2b7a4e99494953b1b94"}, + {file = "websockets-10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0cff816f51fb33c26d6e2b16b5c7d48eaa31dae5488ace6aae468b361f422b63"}, + {file = "websockets-10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dd9becd5fe29773d140d68d607d66a38f60e31b86df75332703757ee645b6faf"}, + {file = "websockets-10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45ec8e75b7dbc9539cbfafa570742fe4f676eb8b0d3694b67dabe2f2ceed8aa6"}, + {file = "websockets-10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4f72e5cd0f18f262f5da20efa9e241699e0cf3a766317a17392550c9ad7b37d8"}, + {file = "websockets-10.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185929b4808b36a79c65b7865783b87b6841e852ef5407a2fb0c03381092fa3b"}, + {file = "websockets-10.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d27a7e34c313b3a7f91adcd05134315002aaf8540d7b4f90336beafaea6217c"}, + {file = "websockets-10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:884be66c76a444c59f801ac13f40c76f176f1bfa815ef5b8ed44321e74f1600b"}, + {file = "websockets-10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:931c039af54fc195fe6ad536fde4b0de04da9d5916e78e55405436348cfb0e56"}, + {file = "websockets-10.4-cp38-cp38-win32.whl", hash = "sha256:db3c336f9eda2532ec0fd8ea49fef7a8df8f6c804cdf4f39e5c5c0d4a4ad9a7a"}, + {file = "websockets-10.4-cp38-cp38-win_amd64.whl", hash = "sha256:48c08473563323f9c9debac781ecf66f94ad5a3680a38fe84dee5388cf5acaf6"}, + {file = "websockets-10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:40e826de3085721dabc7cf9bfd41682dadc02286d8cf149b3ad05bff89311e4f"}, + {file = "websockets-10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56029457f219ade1f2fc12a6504ea61e14ee227a815531f9738e41203a429112"}, + {file = "websockets-10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fc088b7a32f244c519a048c170f14cf2251b849ef0e20cbbb0fdf0fdaf556f"}, + {file = "websockets-10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc8709c00704194213d45e455adc106ff9e87658297f72d544220e32029cd3d"}, + {file = "websockets-10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0154f7691e4fe6c2b2bc275b5701e8b158dae92a1ab229e2b940efe11905dff4"}, + {file = "websockets-10.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c6d2264f485f0b53adf22697ac11e261ce84805c232ed5dbe6b1bcb84b00ff0"}, + {file = "websockets-10.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9bc42e8402dc5e9905fb8b9649f57efcb2056693b7e88faa8fb029256ba9c68c"}, + {file = "websockets-10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:edc344de4dac1d89300a053ac973299e82d3db56330f3494905643bb68801269"}, + {file = "websockets-10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:84bc2a7d075f32f6ed98652db3a680a17a4edb21ca7f80fe42e38753a58ee02b"}, + {file = "websockets-10.4-cp39-cp39-win32.whl", hash = "sha256:c94ae4faf2d09f7c81847c63843f84fe47bf6253c9d60b20f25edfd30fb12588"}, + {file = "websockets-10.4-cp39-cp39-win_amd64.whl", hash = "sha256:bbccd847aa0c3a69b5f691a84d2341a4f8a629c6922558f2a70611305f902d74"}, + {file = "websockets-10.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:82ff5e1cae4e855147fd57a2863376ed7454134c2bf49ec604dfe71e446e2193"}, + {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d210abe51b5da0ffdbf7b43eed0cfdff8a55a1ab17abbec4301c9ff077dd0342"}, + {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:942de28af58f352a6f588bc72490ae0f4ccd6dfc2bd3de5945b882a078e4e179"}, + {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b27d6c1c6cd53dc93614967e9ce00ae7f864a2d9f99fe5ed86706e1ecbf485"}, + {file = "websockets-10.4-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:3d3cac3e32b2c8414f4f87c1b2ab686fa6284a980ba283617404377cd448f631"}, + {file = "websockets-10.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:da39dd03d130162deb63da51f6e66ed73032ae62e74aaccc4236e30edccddbb0"}, + {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389f8dbb5c489e305fb113ca1b6bdcdaa130923f77485db5b189de343a179393"}, + {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09a1814bb15eff7069e51fed0826df0bc0702652b5cb8f87697d469d79c23576"}, + {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff64a1d38d156d429404aaa84b27305e957fd10c30e5880d1765c9480bea490f"}, + {file = "websockets-10.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b343f521b047493dc4022dd338fc6db9d9282658862756b4f6fd0e996c1380e1"}, + {file = "websockets-10.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:932af322458da7e4e35df32f050389e13d3d96b09d274b22a7aa1808f292fee4"}, + {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a4162139374a49eb18ef5b2f4da1dd95c994588f5033d64e0bbfda4b6b6fcf"}, + {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c57e4c1349fbe0e446c9fa7b19ed2f8a4417233b6984277cce392819123142d3"}, + {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b627c266f295de9dea86bd1112ed3d5fafb69a348af30a2422e16590a8ecba13"}, + {file = "websockets-10.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:05a7233089f8bd355e8cbe127c2e8ca0b4ea55467861906b80d2ebc7db4d6b72"}, + {file = "websockets-10.4.tar.gz", hash = "sha256:eef610b23933c54d5d921c92578ae5f89813438fded840c2e9809d378dc765d3"}, ] [[package]] @@ -3774,4 +3807,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = "^3.10" -content-hash = "74cd51c2f332c3ab9addbf3fed3075b6ac8e39a1c841b3419a7e6bcfc61e6ace" +content-hash = "f6992f5ad41711def17e012096e35ea2b01aa576fdaf10df6faf5d09fdbac87f" diff --git a/pyproject.toml b/pyproject.toml index c80b80a..7962359 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "poetry.core.masonry.api" [tool.poetry] name = "x10-python-trading-starknet" -version = "0.0.17" +version = "1.0.5" description = "Python client for X10 API" authors = ["X10 "] repository = "https://github.com/x10xchange/python_sdk" @@ -19,7 +19,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Topic :: Software Development :: Libraries :: Python Modules", ] -packages = [{ include = "x10" }] +packages = [{ include = "x10" }, { include = "extended" }] [tool.poetry.dependencies] aiohttp = ">=3.10.11" @@ -31,7 +31,9 @@ pyyaml = ">=6.0.1" sortedcontainers = ">=2.4.0" strenum = "^0.4.15" tenacity = "^9.1.2" -websockets = ">=12.0,<14.0" +nest-asyncio = ">=1.5.6" +websockets = "^10.4" +propcache = ">=0.4.1" [tool.poetry.group.dev.dependencies] black = "==23.12.0" @@ -55,8 +57,14 @@ types-pyyaml = "==6.0.12.12" typing-extensions = ">=4.9.0" +[tool.pytest.ini_options] +pythonpath = ["."] +testpaths = ["tests"] +asyncio_mode = "auto" +addopts = "--import-mode=importlib" + [tool.mypy] -packages = ["examples", "tests", "x10"] +packages = ["examples", "tests", "x10", "extended"] plugins = ["pydantic.mypy"] 
follow_untyped_imports = true check_untyped_defs = true diff --git a/simple_test.py b/simple_test.py new file mode 100644 index 0000000..cae3bfc --- /dev/null +++ b/simple_test.py @@ -0,0 +1,37 @@ +""" +Simple test to verify native sync implementation is working. +""" + +def test_basic_imports(): + """Test that we can import the native sync classes without dependencies.""" + print("Testing basic native sync imports...") + + try: + # Test direct native sync imports + from extended.api.base_native_sync import BaseNativeSyncClient + print("✅ BaseNativeSyncClient import successful") + + from extended.api.info_native_sync import NativeSyncInfoAPI + print("✅ NativeSyncInfoAPI import successful") + + from extended.api.exchange_native_sync import NativeSyncExchangeAPI + print("✅ NativeSyncExchangeAPI import successful") + + # Test that BaseNativeSyncClient uses requests + import inspect + source = inspect.getsource(BaseNativeSyncClient.__init__) + if 'requests.Session()' in source: + print("✅ BaseNativeSyncClient uses requests.Session()") + else: + print("❌ BaseNativeSyncClient doesn't use requests.Session()") + + return True + + except Exception as e: + print(f"❌ Import failed: {e}") + import traceback + traceback.print_exc() + return False + +if __name__ == "__main__": + test_basic_imports() \ No newline at end of file diff --git a/test_comprehensive_native_sync.py b/test_comprehensive_native_sync.py new file mode 100644 index 0000000..fd61226 --- /dev/null +++ b/test_comprehensive_native_sync.py @@ -0,0 +1,436 @@ +""" +Comprehensive tests for Native Sync Extended SDK. + +Tests all major functionality to ensure the native sync implementation +works correctly and is compatible with ThreadPoolExecutor. +""" + +import sys +import threading +import time +from concurrent.futures import ThreadPoolExecutor +from unittest.mock import Mock, patch, MagicMock +import traceback + + +def test_imports_and_structure(): + """Test that all imports work without async dependencies.""" + print("🧪 Testing imports and structure...") + + try: + # Core native sync imports + from extended.api.base_native_sync import BaseNativeSyncClient + from extended.api.info_native_sync import NativeSyncInfoAPI + from extended.api.exchange_native_sync import NativeSyncExchangeAPI + + # Public API imports + from extended.api.info import InfoAPI + from extended.api.exchange import ExchangeAPI + from extended.client import Client + from extended.setup import setup + + # Check inheritance + assert issubclass(InfoAPI, NativeSyncInfoAPI), "InfoAPI should inherit from NativeSyncInfoAPI" + assert issubclass(ExchangeAPI, NativeSyncExchangeAPI), "ExchangeAPI should inherit from NativeSyncExchangeAPI" + + print(" ✅ All imports successful") + print(" ✅ Proper inheritance structure") + return True + + except Exception as e: + print(f" ❌ Import/structure test failed: {e}") + traceback.print_exc() + return False + + +def test_no_async_dependencies(): + """Test that no async/await code exists in new implementation.""" + print("🧪 Testing for async dependencies...") + + import os + import re + + # Files that should be completely sync. Resolve them relative to this + # file so the check works in any checkout, not just one analysis directory. + project_root = os.path.dirname(os.path.abspath(__file__)) + sync_files = [ + os.path.join(project_root, "extended", "api", "base_native_sync.py"), + os.path.join(project_root, "extended", "api", "info_native_sync.py"), + os.path.join(project_root, "extended", "api", "exchange_native_sync.py"), + os.path.join(project_root, "extended", "api", "base.py"), + os.path.join(project_root, "extended", "api", "info.py"), + os.path.join(project_root, "extended", "api", "exchange.py"),
os.path.join(project_root, "extended", "client.py"), + os.path.join(project_root, "extended", "setup.py"), + ] + + async_patterns = [ + r'\basync\s+def\b', + r'\bawait\s+', + r'run_sync\(', + r'aiohttp', + r'asyncio\.', + ] + + issues_found = [] + + for file_path in sync_files: + if not os.path.exists(file_path): + continue + + try: + with open(file_path, 'r') as f: + content = f.read() + + for pattern in async_patterns: + matches = re.findall(pattern, content) + if matches: + issues_found.append(f"{file_path}: {pattern} found {len(matches)} times") + + except Exception as e: + issues_found.append(f"{file_path}: Error reading file - {e}") + + if issues_found: + print(" ❌ Async dependencies found:") + for issue in issues_found: + print(f" - {issue}") + return False + else: + print(" ✅ No async dependencies found in native sync files") + return True + + +def test_requests_usage(): + """Test that native sync uses requests instead of aiohttp.""" + print("🧪 Testing HTTP client usage...") + + try: + from extended.api.base_native_sync import BaseNativeSyncClient + import inspect + + # Check BaseNativeSyncClient source + source = inspect.getsource(BaseNativeSyncClient.__init__) + + if 'requests.Session()' in source: + print(" ✅ Uses requests.Session() for HTTP") + else: + print(" ❌ Does not use requests.Session()") + return False + + # Check that aiohttp is not imported + if 'aiohttp' in source: + print(" ❌ Still imports aiohttp") + return False + + print(" ✅ Correct HTTP client implementation") + return True + + except Exception as e: + print(f" ❌ HTTP client test failed: {e}") + return False + + +def test_client_instantiation_mocked(): + """Test client instantiation with mocked dependencies.""" + print("🧪 Testing client instantiation (mocked)...") + + try: + # Mock all problematic imports + with patch('extended.auth_sync.SimpleSyncAuth') as mock_auth_class: + with patch('extended.config_sync.MAINNET_CONFIG') as mock_config: + # Setup mocks + mock_auth = Mock() + mock_auth.address = "0x123456789" + mock_auth.stark_public_key = "0x987654321" + mock_auth.api_key = "test_api_key" + mock_auth_class.return_value = mock_auth + + mock_config.api_base_url = "https://api.extended.com" + + # Test Client creation + from extended.client import Client + + client = Client( + api_key="test_key", + vault=12345, + stark_private_key="0xprivate", + stark_public_key="0xpublic", + testnet=False + ) + + # Test properties + assert client.address == "0x123456789" + assert client.public_key == "0x987654321" + assert client.info is not None + assert client.exchange is not None + + print(" ✅ Client instantiation successful") + return True + + except Exception as e: + print(f" ❌ Client instantiation failed: {e}") + traceback.print_exc() + return False + + +def test_setup_function_mocked(): + """Test setup function with mocked dependencies.""" + print("🧪 Testing setup function (mocked)...") + + try: + with patch('extended.client.Client') as mock_client_class: + # Setup mock client + mock_client = Mock() + mock_client.public_key = "0x789456123" + mock_client.info = Mock() + mock_client.exchange = Mock() + mock_client_class.return_value = mock_client + + # Test setup function + from extended.setup import setup + + address, info, exchange = setup( + api_key="test_key", + vault=12345, + stark_private_key="0xprivate", + stark_public_key="0xpublic", + testnet=True + ) + + # Verify return values + assert address == "0x789456123" + assert info is mock_client.info + assert exchange is mock_client.exchange + + # Verify Client was called 
correctly + mock_client_class.assert_called_once_with( + api_key="test_key", + vault=12345, + stark_private_key="0xprivate", + stark_public_key="0xpublic", + testnet=True, + base_url=None + ) + + print(" ✅ Setup function works correctly") + return True + + except Exception as e: + print(f" ❌ Setup function test failed: {e}") + traceback.print_exc() + return False + + +def test_threadpool_compatibility(): + """Test that native sync works in ThreadPoolExecutor (the critical test).""" + print("🧪 Testing ThreadPoolExecutor compatibility...") + + def worker_task(worker_id): + """Task to run in ThreadPoolExecutor.""" + try: + # Mock the problematic dependencies + with patch('extended.api.base_native_sync.SimpleSyncAuth') as mock_auth_class: + with patch('extended.api.base_native_sync.SimpleSyncConfig') as mock_config_class: + # Setup mocks + mock_auth = Mock() + mock_auth.api_key = f"test_key_{worker_id}" + mock_auth_class.return_value = mock_auth + + mock_config = Mock() + mock_config.api_base_url = "https://api.extended.com" + mock_config_class.return_value = mock_config + + # Import and instantiate in worker thread + from extended.api.base_native_sync import BaseNativeSyncClient + + client = BaseNativeSyncClient( + auth=mock_auth, + config=mock_config, + timeout=30 + ) + + # Verify the client has a requests session + assert hasattr(client, 'session'), f"Worker {worker_id}: No session attribute" + + # Try to access session (should not cause event loop issues) + session = client.session + assert session is not None, f"Worker {worker_id}: Session is None" + + return f"Worker {worker_id}: SUCCESS - Native sync works in thread" + + except Exception as e: + return f"Worker {worker_id}: FAILED - {e}" + + try: + # Test with multiple workers (simulates real ThreadPoolExecutor usage) + with ThreadPoolExecutor(max_workers=3) as executor: + futures = [executor.submit(worker_task, i) for i in range(3)] + results = [future.result(timeout=10) for future in futures] + + # Check results + for result in results: + print(f" {result}") + if "FAILED" in result: + return False + + print(" ✅ ThreadPoolExecutor compatibility verified") + return True + + except Exception as e: + print(f" ❌ ThreadPoolExecutor test failed: {e}") + traceback.print_exc() + return False + + +def test_api_method_signatures(): + """Test that API methods have correct signatures (no async).""" + print("🧪 Testing API method signatures...") + + try: + import inspect + from extended.api.info_native_sync import NativeSyncInfoAPI + from extended.api.exchange_native_sync import NativeSyncExchangeAPI + + # Check InfoAPI methods are not async + info_methods = [ + 'user_state', 'open_orders', 'meta', 'all_mids', + 'user_fills', 'l2_snapshot' + ] + + for method_name in info_methods: + if hasattr(NativeSyncInfoAPI, method_name): + method = getattr(NativeSyncInfoAPI, method_name) + if inspect.iscoroutinefunction(method): + print(f" ❌ InfoAPI.{method_name} is async (should be sync)") + return False + + # Check ExchangeAPI methods are not async + exchange_methods = [ + 'order', 'cancel', 'bulk_orders', 'update_leverage', + 'market_open', 'market_close' + ] + + for method_name in exchange_methods: + if hasattr(NativeSyncExchangeAPI, method_name): + method = getattr(NativeSyncExchangeAPI, method_name) + if inspect.iscoroutinefunction(method): + print(f" ❌ ExchangeAPI.{method_name} is async (should be sync)") + return False + + print(" ✅ All API methods are properly sync") + return True + + except Exception as e: + print(f" ❌ API method signature test failed: 
{e}") + return False + + +def test_integration_with_trading_engine_setup(): + """Test compatibility with existing trading engine setup pattern.""" + print("🧪 Testing trading engine setup compatibility...") + + try: + # Mock the setup function to simulate real usage pattern + with patch('extended.setup.Client') as mock_client_class: + mock_client = Mock() + mock_client.public_key = "0xTEST_ADDRESS" + + # Mock info and exchange as native sync objects + mock_info = Mock() + mock_exchange = Mock() + mock_client.info = mock_info + mock_client.exchange = mock_exchange + + mock_client_class.return_value = mock_client + + # Simulate the exact pattern used in helpers.py + from extended.setup import setup as extended_setup + + # Test the exact call pattern from helpers.py + address, info, exchange = extended_setup( + api_key="test_api_key", + vault=12345, + stark_private_key="0xprivate", + stark_public_key="0xpublic", + testnet=False, # testnet=not is_mainnet + base_url=None, + ) + + # Verify the return format matches Hyperliquid/Pacifica + assert isinstance(address, str), "Address should be string" + assert info is mock_info, "Info should be the mock info object" + assert exchange is mock_exchange, "Exchange should be the mock exchange object" + + # Verify the client was instantiated with correct parameters + mock_client_class.assert_called_once_with( + api_key="test_api_key", + vault=12345, + stark_private_key="0xprivate", + stark_public_key="0xpublic", + testnet=False, + base_url=None, + ) + + print(" ✅ Trading engine setup pattern compatibility verified") + return True + + except Exception as e: + print(f" ❌ Trading engine setup test failed: {e}") + traceback.print_exc() + return False + + +def run_all_tests(): + """Run all comprehensive tests.""" + print("🚀 COMPREHENSIVE NATIVE SYNC EXTENDED SDK TESTS") + print("=" * 70) + + tests = [ + ("Import Structure", test_imports_and_structure), + ("Async Dependencies", test_no_async_dependencies), + ("HTTP Client", test_requests_usage), + ("Client Instantiation", test_client_instantiation_mocked), + ("Setup Function", test_setup_function_mocked), + ("ThreadPoolExecutor", test_threadpool_compatibility), + ("API Signatures", test_api_method_signatures), + ("Trading Engine Setup", test_integration_with_trading_engine_setup), + ] + + results = [] + for test_name, test_func in tests: + print(f"\n{test_name}:") + try: + result = test_func() + results.append((test_name, result)) + except Exception as e: + print(f" ❌ {test_name} crashed: {e}") + results.append((test_name, False)) + + # Summary + print("\n" + "=" * 70) + print("📊 TEST RESULTS SUMMARY:") + + passed = 0 + total = len(results) + + for test_name, result in results: + status = "✅ PASS" if result else "❌ FAIL" + print(f" {status}: {test_name}") + if result: + passed += 1 + + print(f"\n🏆 FINAL SCORE: {passed}/{total} tests passed") + + if passed == total: + print("🎉 ALL TESTS PASSED!") + print("✅ Native Sync Extended SDK is READY FOR PRODUCTION") + print("✅ Compatible with ThreadPoolExecutor") + print("✅ Same API as Hyperliquid/Pacifica") + print("✅ No async/await dependencies") + return True + else: + print("❌ Some tests failed - review issues above") + return False + + +if __name__ == "__main__": + success = run_all_tests() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/test_native_sync.py b/test_native_sync.py new file mode 100644 index 0000000..903ad75 --- /dev/null +++ b/test_native_sync.py @@ -0,0 +1,157 @@ +""" +Test script for Native Sync Extended SDK. 
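+
+Intended to be run directly; the __main__ block at the bottom exits 0 on
+success and 1 on failure:
+
+    python test_native_sync.py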
+ +Verifies that the native sync implementation works without async/await issues. +""" + +import sys +import traceback +from unittest.mock import Mock, patch + +def test_import_structure(): + """Test that imports work without async dependencies.""" + print("Testing import structure...") + + try: + # Test core imports + from extended.api.base_native_sync import BaseNativeSyncClient + from extended.api.info_native_sync import NativeSyncInfoAPI + from extended.api.exchange_native_sync import NativeSyncExchangeAPI + + # Test public API imports + from extended.api.info import InfoAPI + from extended.api.exchange import ExchangeAPI + from extended.client import Client + + print("✅ All imports successful - no async dependencies detected") + return True + + except Exception as e: + print(f"❌ Import failed: {e}") + traceback.print_exc() + return False + +def test_client_instantiation(): + """Test that client can be created without async issues.""" + print("\nTesting client instantiation...") + + try: + # Mock the ExtendedAuth to avoid needing real credentials + with patch('extended.auth.ExtendedAuth') as mock_auth: + mock_auth_instance = Mock() + mock_auth_instance.address = "0x123456789" + mock_auth_instance.stark_public_key = "0x987654321" + mock_auth_instance.api_key = "test_api_key" + mock_auth.return_value = mock_auth_instance + + # Mock the config + with patch('extended.config.MAINNET_CONFIG') as mock_config: + mock_config.api_base_url = "https://api.extended.com" + + client = Client( + api_key="test_api_key", + vault=12345, + stark_private_key="0xprivate", + stark_public_key="0xpublic", + testnet=False + ) + + # Verify properties work + assert client.address == "0x123456789" + assert client.public_key == "0x987654321" + assert client.info is not None + assert client.exchange is not None + + print("✅ Client instantiation successful - native sync working") + return True + + except Exception as e: + print(f"❌ Client instantiation failed: {e}") + traceback.print_exc() + return False + +def test_no_run_sync_usage(): + """Test that run_sync is not used anywhere in the new implementation.""" + print("\nTesting for run_sync usage...") + + import os + import re + + # Scan the extended package next to this file so the check is portable + sdk_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "extended") + run_sync_pattern = r"run_sync\(" + + files_with_run_sync = [] + + for root, dirs, files in os.walk(sdk_path): + for file in files: + if file.endswith('.py') and not file.endswith('_old_async_wrapper.py'): + file_path = os.path.join(root, file) + try: + with open(file_path, 'r') as f: + content = f.read() + if re.search(run_sync_pattern, content): + files_with_run_sync.append(file_path) + except (OSError, UnicodeDecodeError): + # Skip files that cannot be read or decoded + pass + + if files_with_run_sync: + print(f"❌ Found run_sync usage in: {files_with_run_sync}") + return False + else: + print("✅ No run_sync usage found in native sync implementation") + return True + +def test_requests_usage(): + """Test that requests is used instead of aiohttp.""" + print("\nTesting HTTP client usage...") + + try: + from extended.api.base_native_sync import BaseNativeSyncClient + + # Check that BaseNativeSyncClient uses requests + import inspect + source = inspect.getsource(BaseNativeSyncClient.__init__) + + if 'requests.Session()' in source: + print("✅ Uses requests.Session() - correct sync HTTP client") + return True + else: + print("❌ Does not use requests.Session()") + return False + + except Exception as e: + print(f"❌ HTTP client test failed: {e}") + return False + +def run_all_tests(): + """Run all tests and return overall success.""" + print("🚀 Testing Native Sync 
Extended SDK Implementation") + print("=" * 60) + + tests = [ + test_import_structure, + test_client_instantiation, + test_no_run_sync_usage, + test_requests_usage, + ] + + results = [] + for test in tests: + results.append(test()) + + print("\n" + "=" * 60) + + if all(results): + print("🎉 ALL TESTS PASSED - Native Sync Extended SDK is ready!") + print("✅ No async/await dependencies") + print("✅ No run_sync() usage") + print("✅ Uses requests.Session() for HTTP") + print("✅ Same API surface as original SDK") + return True + else: + print("❌ Some tests failed - see details above") + return False + +if __name__ == "__main__": + success = run_all_tests() + sys.exit(0 if success else 1) \ No newline at end of file diff --git a/tests/conftest.py b/tests/conftest.py index 7891862..78d0f4e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,16 @@ +import sys +from pathlib import Path + import pytest +def pytest_configure(config): + """Add project root to sys.path for extended module.""" + project_root = Path(__file__).parent.parent + if str(project_root) not in sys.path: + sys.path.insert(0, str(project_root)) + + @pytest.fixture def create_accounts(): from tests.fixtures.accounts import create_accounts as _create_accounts diff --git a/tests/extended/__init__.py b/tests/extended/__init__.py new file mode 100644 index 0000000..5535793 --- /dev/null +++ b/tests/extended/__init__.py @@ -0,0 +1 @@ +# Extended SDK Tests diff --git a/tests/extended/conftest.py b/tests/extended/conftest.py new file mode 100644 index 0000000..926a07a --- /dev/null +++ b/tests/extended/conftest.py @@ -0,0 +1,16 @@ +""" +Pytest configuration for Extended SDK tests. + +Adds the project root to sys.path to ensure extended module is importable. +""" + +import sys +from pathlib import Path + +# Add the project root to sys.path +project_root = Path(__file__).parent.parent.parent +if str(project_root) not in sys.path: + sys.path.insert(0, str(project_root)) + +# Import fixtures from fixtures module +from tests.extended.fixtures import * # noqa: F401, F403 diff --git a/tests/extended/fixtures.py b/tests/extended/fixtures.py new file mode 100644 index 0000000..2d0758d --- /dev/null +++ b/tests/extended/fixtures.py @@ -0,0 +1,412 @@ +""" +Test fixtures for Extended SDK tests. + +Provides sample data structures for testing transformers and APIs. 
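+
+Example of how a test consumes these fixtures (a sketch; the transformer
+import mirrors the transformer tests in this directory):
+
+    from extended.transformers.account import AccountTransformer
+
+    def test_user_state_sketch(mock_balance, mock_positions):
+        state = AccountTransformer.transform_user_state(mock_balance, mock_positions)
+        assert state["withdrawable"] == "8000.00"  # available_for_trade below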
+""" + +from decimal import Decimal +from typing import Any, Dict, List, Optional + +import pytest + +from x10.perpetual.balances import BalanceModel +from x10.perpetual.candles import CandleModel +from x10.perpetual.markets import ( + MarketModel, + MarketStatsModel, + TradingConfigModel, + L2ConfigModel, + RiskFactorConfig, +) +from x10.perpetual.orderbooks import OrderbookUpdateModel, OrderbookQuantityModel +from x10.perpetual.orders import ( + OpenOrderModel, + PlacedOrderModel, + OrderSide, + OrderType, + TimeInForce, + OrderStatus, +) +from x10.perpetual.positions import PositionModel, PositionSide, PositionStatus +from x10.perpetual.trades import AccountTradeModel, TradeType + + +def create_mock_balance() -> BalanceModel: + """Create a mock balance for testing.""" + return BalanceModel( + collateral_name="USD", + balance=Decimal("10000.00"), + equity=Decimal("10500.00"), + available_for_trade=Decimal("8000.00"), + available_for_withdrawal=Decimal("7500.00"), + unrealised_pnl=Decimal("500.00"), + initial_margin=Decimal("2000.00"), + margin_ratio=Decimal("0.20"), + updated_time=1700000000000, + ) + + +def create_mock_position( + market: str = "BTC-USD", + side: str = "LONG", + size: Decimal = Decimal("0.5"), + open_price: Decimal = Decimal("50000.00"), + value: Decimal = Decimal("25000.00"), + unrealised_pnl: Decimal = Decimal("500.00"), + leverage: int = 10, +) -> PositionModel: + """Create a mock position for testing.""" + return PositionModel( + id=1001, + account_id=1001, + market=market, + status=PositionStatus.OPENED, + side=PositionSide(side), + size=size, + open_price=open_price, + mark_price=Decimal("50500.00"), + value=value, + unrealised_pnl=unrealised_pnl, + realised_pnl=Decimal("0"), + leverage=Decimal(leverage), + liquidation_price=Decimal("45000.00"), + created_at=1700000000000, + updated_at=1700000000000, + ) + + +def create_mock_positions() -> List[PositionModel]: + """Create a list of mock positions for testing.""" + return [ + create_mock_position( + market="BTC-USD", + side="LONG", + size=Decimal("0.5"), + open_price=Decimal("50000.00"), + value=Decimal("25000.00"), + unrealised_pnl=Decimal("500.00"), + leverage=10, + ), + create_mock_position( + market="ETH-USD", + side="SHORT", + size=Decimal("5.0"), + open_price=Decimal("3000.00"), + value=Decimal("15000.00"), + unrealised_pnl=Decimal("-200.00"), + leverage=5, + ), + ] + + +def create_mock_market_stats() -> MarketStatsModel: + """Create mock market stats for testing.""" + return MarketStatsModel( + daily_volume=Decimal("1000000.00"), + daily_volume_base=Decimal("20.00"), + daily_price_change=Decimal("500.00"), + daily_price_change_percentage=Decimal("0.01"), + daily_low=Decimal("49000.00"), + daily_high=Decimal("51000.00"), + last_price=Decimal("50500.00"), + ask_price=Decimal("50510.00"), + bid_price=Decimal("50490.00"), + mark_price=Decimal("50500.00"), + index_price=Decimal("50500.00"), + funding_rate=Decimal("0.0001"), + next_funding_rate=1700000000000, + open_interest=Decimal("5000000.00"), + open_interest_base=Decimal("100.00"), + ) + + +def create_mock_trading_config() -> TradingConfigModel: + """Create mock trading config for testing.""" + return TradingConfigModel( + min_order_size=Decimal("0.001"), + min_order_size_change=Decimal("0.00001"), + min_price_change=Decimal("0.1"), + max_market_order_value=Decimal("1000000"), + max_limit_order_value=Decimal("5000000"), + max_position_value=Decimal("10000000"), + max_leverage=Decimal("50"), + max_num_orders=200, + limit_price_cap=Decimal("0.05"), + 
limit_price_floor=Decimal("0.05"), + risk_factor_config=[ + RiskFactorConfig(upper_bound=Decimal("400000"), risk_factor=Decimal("0.02")), + ], + ) + + +def create_mock_l2_config() -> L2ConfigModel: + """Create mock L2 config for testing.""" + return L2ConfigModel( + type="STARKX", + collateral_id="0x31857064564ed0ff978e687456963cba09c2c6985d8f9300a1de4962fafa054", + collateral_resolution=1000000, + synthetic_id="0x4254432d3600000000000000000000", + synthetic_resolution=1000000, + ) + + +def create_mock_market(name: str = "BTC-USD", active: bool = True) -> MarketModel: + """Create a mock market for testing.""" + return MarketModel( + name=name, + category="L1", + asset_name=name.split("-")[0], + asset_precision=5, + collateral_asset_name="USD", + collateral_asset_precision=6, + active=active, + market_stats=create_mock_market_stats(), + trading_config=create_mock_trading_config(), + l2_config=create_mock_l2_config(), + ) + + +def create_mock_markets() -> List[MarketModel]: + """Create a list of mock markets for testing.""" + return [ + create_mock_market("BTC-USD"), + create_mock_market("ETH-USD"), + create_mock_market("SOL-USD"), + ] + + +def create_mock_orderbook() -> OrderbookUpdateModel: + """Create a mock orderbook for testing.""" + return OrderbookUpdateModel( + market="BTC-USD", + bid=[ + OrderbookQuantityModel(price=Decimal("50490.00"), qty=Decimal("1.5")), + OrderbookQuantityModel(price=Decimal("50480.00"), qty=Decimal("2.0")), + OrderbookQuantityModel(price=Decimal("50470.00"), qty=Decimal("3.5")), + ], + ask=[ + OrderbookQuantityModel(price=Decimal("50510.00"), qty=Decimal("1.2")), + OrderbookQuantityModel(price=Decimal("50520.00"), qty=Decimal("2.5")), + OrderbookQuantityModel(price=Decimal("50530.00"), qty=Decimal("4.0")), + ], + ) + + +def create_mock_candles() -> List[CandleModel]: + """Create mock candles for testing.""" + base_ts = 1700000000000 + return [ + CandleModel( + timestamp=base_ts, + open=Decimal("50000.00"), + high=Decimal("50100.00"), + low=Decimal("49900.00"), + close=Decimal("50050.00"), + volume=Decimal("100.5"), + ), + CandleModel( + timestamp=base_ts + 60000, + open=Decimal("50050.00"), + high=Decimal("50150.00"), + low=Decimal("50000.00"), + close=Decimal("50100.00"), + volume=Decimal("150.2"), + ), + CandleModel( + timestamp=base_ts + 120000, + open=Decimal("50100.00"), + high=Decimal("50200.00"), + low=Decimal("50050.00"), + close=Decimal("50180.00"), + volume=Decimal("120.8"), + ), + ] + + +def create_mock_open_order( + order_id: int = 12345, + market: str = "BTC-USD", + side: str = "BUY", + price: Decimal = Decimal("50000.00"), + qty: Decimal = Decimal("0.1"), + filled_qty: Optional[Decimal] = None, + external_id: str = "test-order-001", +) -> OpenOrderModel: + """Create a mock open order for testing.""" + return OpenOrderModel( + id=order_id, + account_id=1001, + external_id=external_id, + market=market, + type=OrderType.LIMIT, + side=OrderSide(side), + status=OrderStatus.NEW, + price=price, + qty=qty, + filled_qty=filled_qty, + reduce_only=False, + post_only=False, + time_in_force=TimeInForce.GTT, + created_time=1700000000000, + updated_time=1700000000000, + ) + + +def create_mock_open_orders() -> List[OpenOrderModel]: + """Create a list of mock open orders for testing.""" + return [ + create_mock_open_order( + order_id=12345, + market="BTC-USD", + side="BUY", + price=Decimal("50000.00"), + qty=Decimal("0.1"), + external_id="test-order-001", + ), + create_mock_open_order( + order_id=12346, + market="ETH-USD", + side="SELL", + 
price=Decimal("3000.00"), + qty=Decimal("1.0"), + filled_qty=Decimal("0.5"), + external_id="test-order-002", + ), + ] + + +def create_mock_placed_order( + order_id: int = 12345, + external_id: str = "test-order-001", +) -> PlacedOrderModel: + """Create a mock placed order response for testing.""" + return PlacedOrderModel( + id=order_id, + external_id=external_id, + ) + + +def create_mock_trade( + trade_id: int = 98765, + order_id: int = 12345, + market: str = "BTC-USD", + side: str = "BUY", + price: Decimal = Decimal("50000.00"), + qty: Decimal = Decimal("0.1"), + fee: Decimal = Decimal("2.50"), + is_taker: bool = True, +) -> AccountTradeModel: + """Create a mock trade for testing.""" + return AccountTradeModel( + id=trade_id, + account_id=1001, + market=market, + order_id=order_id, + side=OrderSide(side), + price=price, + qty=qty, + value=price * qty, + fee=fee, + is_taker=is_taker, + trade_type=TradeType.TRADE, + created_time=1700000000000, + ) + + +def create_mock_trades() -> List[AccountTradeModel]: + """Create a list of mock trades for testing.""" + return [ + create_mock_trade( + trade_id=98765, + order_id=12345, + market="BTC-USD", + side="BUY", + price=Decimal("50000.00"), + qty=Decimal("0.1"), + fee=Decimal("2.50"), + is_taker=True, + ), + create_mock_trade( + trade_id=98766, + order_id=12346, + market="ETH-USD", + side="SELL", + price=Decimal("3000.00"), + qty=Decimal("0.5"), + fee=Decimal("0.75"), + is_taker=False, + ), + ] + + +# Pytest fixtures +@pytest.fixture +def mock_balance(): + """Fixture for mock balance.""" + return create_mock_balance() + + +@pytest.fixture +def mock_position(): + """Fixture for a single mock position.""" + return create_mock_position() + + +@pytest.fixture +def mock_positions(): + """Fixture for mock positions list.""" + return create_mock_positions() + + +@pytest.fixture +def mock_market(): + """Fixture for a single mock market.""" + return create_mock_market() + + +@pytest.fixture +def mock_markets(): + """Fixture for mock markets list.""" + return create_mock_markets() + + +@pytest.fixture +def mock_orderbook(): + """Fixture for mock orderbook.""" + return create_mock_orderbook() + + +@pytest.fixture +def mock_candles(): + """Fixture for mock candles.""" + return create_mock_candles() + + +@pytest.fixture +def mock_open_order(): + """Fixture for a single mock open order.""" + return create_mock_open_order() + + +@pytest.fixture +def mock_open_orders(): + """Fixture for mock open orders list.""" + return create_mock_open_orders() + + +@pytest.fixture +def mock_placed_order(): + """Fixture for mock placed order response.""" + return create_mock_placed_order() + + +@pytest.fixture +def mock_trade(): + """Fixture for a single mock trade.""" + return create_mock_trade() + + +@pytest.fixture +def mock_trades(): + """Fixture for mock trades list.""" + return create_mock_trades() diff --git a/tests/extended/test_account_transformer.py b/tests/extended/test_account_transformer.py new file mode 100644 index 0000000..0482675 --- /dev/null +++ b/tests/extended/test_account_transformer.py @@ -0,0 +1,199 @@ +""" +Unit tests for AccountTransformer. + +Tests transformation of Extended account data to Hyperliquid format. 
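+
+Target structure being asserted (abridged sketch; the concrete values come
+from the mock balance and positions in tests.extended.fixtures):
+
+    {
+        "assetPositions": [{"type": "oneWay", "position": {"coin": "BTC", "szi": "0.5", ...}}],
+        "crossMaintenanceMarginUsed": "2000.00",
+        "crossMarginSummary": {...},
+        "marginSummary": {"accountValue": "10500.00", ...},
+        "withdrawable": "8000.00",
+    }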
+""" + +import pytest +from decimal import Decimal +from hamcrest import assert_that, equal_to, has_key, has_length, has_entries + +from extended.transformers.account import AccountTransformer +from tests.extended.fixtures import ( + create_mock_balance, + create_mock_position, + create_mock_positions, +) + + +class TestAccountTransformerUserState: + """Tests for transform_user_state method.""" + + def test_transform_user_state_structure(self): + """Test user_state has correct Hyperliquid structure.""" + balance = create_mock_balance() + positions = create_mock_positions() + + result = AccountTransformer.transform_user_state(balance, positions) + + assert_that(result, has_key("assetPositions")) + assert_that(result, has_key("crossMaintenanceMarginUsed")) + assert_that(result, has_key("crossMarginSummary")) + assert_that(result, has_key("marginSummary")) + assert_that(result, has_key("withdrawable")) + + def test_transform_user_state_balance_values(self): + """Test balance values are correctly transformed.""" + balance = create_mock_balance() + positions = [] + + result = AccountTransformer.transform_user_state(balance, positions) + + assert_that(result["withdrawable"], equal_to("8000.00")) # available_for_trade + assert_that(result["crossMaintenanceMarginUsed"], equal_to("2000.00")) + + margin_summary = result["marginSummary"] + assert_that(margin_summary["accountValue"], equal_to("10500.00")) # equity + assert_that(margin_summary["totalMarginUsed"], equal_to("2000.00")) + assert_that(margin_summary["totalRawUsd"], equal_to("10000.00")) # balance + assert_that(margin_summary["withdrawable"], equal_to("8000.00")) + + def test_transform_user_state_with_positions(self): + """Test user_state with positions.""" + balance = create_mock_balance() + positions = create_mock_positions() + + result = AccountTransformer.transform_user_state(balance, positions) + + assert_that(result["assetPositions"], has_length(2)) + + # Total position value should be sum of all positions + cross_summary = result["crossMarginSummary"] + expected_total = str(Decimal("25000.00") + Decimal("15000.00")) + assert_that(cross_summary["totalNtlPos"], equal_to(expected_total)) + + def test_transform_user_state_empty_positions(self): + """Test user_state with no positions.""" + balance = create_mock_balance() + positions = [] + + result = AccountTransformer.transform_user_state(balance, positions) + + assert_that(result["assetPositions"], equal_to([])) + assert_that(result["crossMarginSummary"]["totalNtlPos"], equal_to("0")) + + +class TestAccountTransformerPosition: + """Tests for transform_position method.""" + + def test_transform_position_structure(self): + """Test position has correct Hyperliquid structure.""" + position = create_mock_position() + result = AccountTransformer.transform_position(position) + + assert_that(result, has_key("position")) + assert_that(result, has_key("type")) + assert_that(result["type"], equal_to("oneWay")) + + pos = result["position"] + assert_that(pos, has_key("coin")) + assert_that(pos, has_key("szi")) + assert_that(pos, has_key("leverage")) + assert_that(pos, has_key("entryPx")) + assert_that(pos, has_key("positionValue")) + assert_that(pos, has_key("unrealizedPnl")) + assert_that(pos, has_key("liquidationPx")) + assert_that(pos, has_key("marginUsed")) + assert_that(pos, has_key("returnOnEquity")) + + def test_transform_position_long(self): + """Test LONG position has positive szi.""" + position = create_mock_position( + market="BTC-USD", + side="LONG", + size=Decimal("0.5"), + ) + result = 
AccountTransformer.transform_position(position) + + assert_that(result["position"]["coin"], equal_to("BTC")) + assert_that(result["position"]["szi"], equal_to("0.5")) # Positive for LONG + + def test_transform_position_short(self): + """Test SHORT position has negative szi.""" + position = create_mock_position( + market="ETH-USD", + side="SHORT", + size=Decimal("5.0"), + ) + result = AccountTransformer.transform_position(position) + + assert_that(result["position"]["coin"], equal_to("ETH")) + assert_that(result["position"]["szi"], equal_to("-5.0")) # Negative for SHORT + + def test_transform_position_leverage(self): + """Test leverage is correctly formatted.""" + position = create_mock_position(leverage=10) + result = AccountTransformer.transform_position(position) + + leverage = result["position"]["leverage"] + assert_that(leverage["type"], equal_to("cross")) + assert_that(leverage["value"], equal_to(10)) + + def test_transform_position_values(self): + """Test position values are correctly transformed.""" + position = create_mock_position( + open_price=Decimal("50000.00"), + value=Decimal("25000.00"), + unrealised_pnl=Decimal("500.00"), + ) + result = AccountTransformer.transform_position(position) + + pos = result["position"] + assert_that(pos["entryPx"], equal_to("50000.00")) + assert_that(pos["positionValue"], equal_to("25000.00")) + assert_that(pos["unrealizedPnl"], equal_to("500.00")) + assert_that(pos["liquidationPx"], equal_to("45000.00")) + + def test_transform_position_margin_used_calculation(self): + """Test margin used is calculated from value/leverage.""" + position = create_mock_position( + value=Decimal("25000.00"), + leverage=10, + ) + result = AccountTransformer.transform_position(position) + + # marginUsed = value / leverage = 25000 / 10 = 2500 + assert_that(result["position"]["marginUsed"], equal_to("2500.00")) + + def test_transform_position_roe_calculation(self): + """Test return on equity is calculated correctly.""" + position = create_mock_position( + value=Decimal("25000.00"), + unrealised_pnl=Decimal("500.00"), + leverage=10, + ) + result = AccountTransformer.transform_position(position) + + # marginUsed = 25000 / 10 = 2500 + # ROE = 500 / 2500 = 0.2 + assert_that(result["position"]["returnOnEquity"], equal_to("0.2")) + + +class TestAccountTransformerBalance: + """Tests for transform_balance method.""" + + def test_transform_balance_structure(self): + """Test balance has all expected fields.""" + balance = create_mock_balance() + result = AccountTransformer.transform_balance(balance) + + assert_that(result, has_key("balance")) + assert_that(result, has_key("equity")) + assert_that(result, has_key("available_for_trade")) + assert_that(result, has_key("available_for_withdrawal")) + assert_that(result, has_key("unrealised_pnl")) + assert_that(result, has_key("initial_margin")) + assert_that(result, has_key("margin_ratio")) + + def test_transform_balance_values(self): + """Test balance values are correctly transformed.""" + balance = create_mock_balance() + result = AccountTransformer.transform_balance(balance) + + assert_that(result["balance"], equal_to("10000.00")) + assert_that(result["equity"], equal_to("10500.00")) + assert_that(result["available_for_trade"], equal_to("8000.00")) + assert_that(result["available_for_withdrawal"], equal_to("7500.00")) + assert_that(result["unrealised_pnl"], equal_to("500.00")) + assert_that(result["initial_margin"], equal_to("2000.00")) + assert_that(result["margin_ratio"], equal_to("0.20")) diff --git a/tests/extended/test_auth.py 
b/tests/extended/test_auth.py new file mode 100644 index 0000000..9d0095c --- /dev/null +++ b/tests/extended/test_auth.py @@ -0,0 +1,226 @@ +""" +Unit tests for ExtendedAuth. + +Tests authentication module functionality. +""" + +import pytest +from hamcrest import assert_that, equal_to, is_not, none, instance_of + +from x10.perpetual.accounts import StarkPerpetualAccount +from x10.perpetual.configuration import TESTNET_CONFIG, MAINNET_CONFIG +from x10.perpetual.trading_client import PerpetualTradingClient + +from extended.auth import ExtendedAuth + + +# Test credentials +TEST_API_KEY = "test-api-key-12345" +TEST_VAULT = 10001 +TEST_PRIVATE_KEY = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" +TEST_PUBLIC_KEY = "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890" + + +class TestExtendedAuthInit: + """Tests for ExtendedAuth initialization.""" + + def test_init_with_required_params(self): + """Test initialization with required parameters.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(auth.api_key, equal_to(TEST_API_KEY)) + assert_that(auth.vault, equal_to(TEST_VAULT)) + assert_that(auth.stark_private_key, equal_to(TEST_PRIVATE_KEY)) + assert_that(auth.stark_public_key, equal_to(TEST_PUBLIC_KEY)) + assert_that(auth.testnet, equal_to(False)) # Default + + def test_init_with_testnet_true(self): + """Test initialization with testnet=True.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=True, + ) + + assert_that(auth.testnet, equal_to(True)) + + def test_init_with_testnet_false(self): + """Test initialization with testnet=False (explicit).""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=False, + ) + + assert_that(auth.testnet, equal_to(False)) + + +class TestExtendedAuthAddress: + """Tests for address property.""" + + def test_address_returns_public_key(self): + """Test that address property returns the stark public key.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(auth.address, equal_to(TEST_PUBLIC_KEY)) + + +class TestExtendedAuthConfig: + """Tests for get_config method.""" + + def test_get_config_testnet(self): + """Test getting testnet config.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=True, + ) + + config = auth.get_config() + assert_that(config, equal_to(TESTNET_CONFIG)) + + def test_get_config_mainnet(self): + """Test getting mainnet config.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=False, + ) + + config = auth.get_config() + assert_that(config, equal_to(MAINNET_CONFIG)) + + +class TestExtendedAuthStarkAccount: + """Tests for get_stark_account method.""" + + def test_get_stark_account_creates_account(self): + """Test that get_stark_account creates a StarkPerpetualAccount.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + account = auth.get_stark_account() + + assert_that(account, 
is_not(none())) + assert_that(account, instance_of(StarkPerpetualAccount)) + assert_that(account.vault, equal_to(TEST_VAULT)) + assert_that(account.api_key, equal_to(TEST_API_KEY)) + + def test_get_stark_account_caches_instance(self): + """Test that get_stark_account returns the same instance on subsequent calls.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + account1 = auth.get_stark_account() + account2 = auth.get_stark_account() + + assert_that(account1 is account2, equal_to(True)) + + +class TestExtendedAuthTradingClient: + """Tests for get_trading_client method.""" + + def test_get_trading_client_creates_client(self): + """Test that get_trading_client creates a PerpetualTradingClient.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + client = auth.get_trading_client() + + assert_that(client, is_not(none())) + assert_that(client, instance_of(PerpetualTradingClient)) + + def test_get_trading_client_caches_instance(self): + """Test that get_trading_client returns the same instance on subsequent calls.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + client1 = auth.get_trading_client() + client2 = auth.get_trading_client() + + assert_that(client1 is client2, equal_to(True)) + + def test_get_trading_client_uses_correct_config(self): + """Test that trading client uses the correct endpoint config.""" + auth_testnet = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=True, + ) + + client = auth_testnet.get_trading_client() + # Client should be created with testnet config + assert_that(client, is_not(none())) + + +class TestExtendedAuthClose: + """Tests for close method.""" + + @pytest.mark.asyncio + async def test_close_without_client(self): + """Test closing auth without ever creating a trading client.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + # Should not raise any exception + await auth.close() + + @pytest.mark.asyncio + async def test_close_clears_trading_client(self): + """Test that close clears the trading client reference.""" + auth = ExtendedAuth( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + # Create a trading client + client = auth.get_trading_client() + assert_that(auth._trading_client, is_not(none())) + + # Close + await auth.close() + + # Trading client should be cleared + assert_that(auth._trading_client, none()) diff --git a/tests/extended/test_client.py b/tests/extended/test_client.py new file mode 100644 index 0000000..ec9a87b --- /dev/null +++ b/tests/extended/test_client.py @@ -0,0 +1,227 @@ +""" +Unit tests for Extended SDK Client and setup functions. + +Tests Client, AsyncClient, setup(), and async_setup(). 
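+
+Construction pattern exercised below (a sketch; the credential strings are
+placeholders, not working keys):
+
+    client = Client(api_key="...", vault=10001,
+                    stark_private_key="0x...", stark_public_key="0x...")
+    client.address    # the stark public key
+    client.info       # InfoAPI
+    client.exchange   # ExchangeAPI
+
+    address, info, exchange = extended_setup(api_key="...", vault=10001,
+                                             stark_private_key="0x...",
+                                             stark_public_key="0x...")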
+""" + +import pytest +from hamcrest import assert_that, equal_to, is_not, none, instance_of + +from extended.client import Client +from extended.async_client import AsyncClient +from extended.setup import setup as extended_setup, async_setup +from extended.api.info import InfoAPI +from extended.api.info_async import AsyncInfoAPI +from extended.api.exchange import ExchangeAPI +from extended.api.exchange_async import AsyncExchangeAPI + + +# Test credentials +TEST_API_KEY = "test-api-key-12345" +TEST_VAULT = 10001 +TEST_PRIVATE_KEY = "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" +TEST_PUBLIC_KEY = "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890" + + +class TestClientInit: + """Tests for Client initialization.""" + + def test_client_init_with_required_params(self): + """Test client initialization with required parameters.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.address, equal_to(TEST_PUBLIC_KEY)) + assert_that(client.public_key, equal_to(TEST_PUBLIC_KEY)) + + def test_client_init_with_testnet(self): + """Test client initialization with testnet=True.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=True, + ) + + assert_that(client, is_not(none())) + + def test_client_init_with_timeout(self): + """Test client initialization with custom timeout.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + timeout=60, + ) + + assert_that(client._timeout, equal_to(60)) + + +class TestClientProperties: + """Tests for Client properties.""" + + def test_info_property(self): + """Test info property returns InfoAPI.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.info, is_not(none())) + assert_that(client.info, instance_of(InfoAPI)) + + def test_exchange_property(self): + """Test exchange property returns ExchangeAPI.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.exchange, is_not(none())) + assert_that(client.exchange, instance_of(ExchangeAPI)) + + def test_address_property(self): + """Test address property returns stark public key.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.address, equal_to(TEST_PUBLIC_KEY)) + + def test_public_key_property(self): + """Test public_key property returns stark public key.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.public_key, equal_to(TEST_PUBLIC_KEY)) + + +class TestClientClose: + """Tests for Client close method.""" + + def test_client_close(self): + """Test that close() runs without error.""" + client = Client( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + # Should not raise + client.close() + + +class TestAsyncClientInit: + """Tests for AsyncClient initialization.""" + + def test_async_client_init(self): + """Test async client initialization.""" + client = 
AsyncClient( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.address, equal_to(TEST_PUBLIC_KEY)) + assert_that(client.public_key, equal_to(TEST_PUBLIC_KEY)) + + def test_async_client_info_property(self): + """Test async client info property returns AsyncInfoAPI.""" + client = AsyncClient( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.info, is_not(none())) + assert_that(client.info, instance_of(AsyncInfoAPI)) + + def test_async_client_exchange_property(self): + """Test async client exchange property returns AsyncExchangeAPI.""" + client = AsyncClient( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(client.exchange, is_not(none())) + assert_that(client.exchange, instance_of(AsyncExchangeAPI)) + + +class TestSetupFunction: + """Tests for extended_setup() function.""" + + def test_setup_returns_tuple(self): + """Test extended_setup() returns (address, info, exchange) tuple.""" + address, info, exchange = extended_setup( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(address, equal_to(TEST_PUBLIC_KEY)) + assert_that(info, instance_of(InfoAPI)) + assert_that(exchange, instance_of(ExchangeAPI)) + + def test_setup_with_testnet(self): + """Test extended_setup() with testnet=True.""" + address, info, exchange = extended_setup( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=True, + ) + + assert_that(address, equal_to(TEST_PUBLIC_KEY)) + + +class TestAsyncSetupFunction: + """Tests for async_setup() function.""" + + def test_async_setup_returns_tuple(self): + """Test async_setup() returns (address, info, exchange) tuple.""" + address, info, exchange = async_setup( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + ) + + assert_that(address, equal_to(TEST_PUBLIC_KEY)) + assert_that(info, instance_of(AsyncInfoAPI)) + assert_that(exchange, instance_of(AsyncExchangeAPI)) + + def test_async_setup_with_testnet(self): + """Test async_setup() with testnet=True.""" + address, info, exchange = async_setup( + api_key=TEST_API_KEY, + vault=TEST_VAULT, + stark_private_key=TEST_PRIVATE_KEY, + stark_public_key=TEST_PUBLIC_KEY, + testnet=True, + ) + + assert_that(address, equal_to(TEST_PUBLIC_KEY)) diff --git a/tests/extended/test_helpers.py b/tests/extended/test_helpers.py new file mode 100644 index 0000000..79f220a --- /dev/null +++ b/tests/extended/test_helpers.py @@ -0,0 +1,181 @@ +""" +Unit tests for Extended SDK helper functions. + +Tests utility functions in extended.utils.helpers. 
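+
+Behaviour sketch (each value mirrors an assertion below):
+
+    normalize_market_name("BTC")                 # -> "BTC-USD" (Extended format)
+    to_hyperliquid_market_name("BTC-USD")        # -> "BTC" (Hyperliquid format)
+    parse_order_type({"limit": {"tif": "Alo"}})  # -> (TimeInForce.GTT, True), i.e. post-only
+    parse_builder({"b": "123", "f": 10})         # -> (123, Decimal("0.0001")); f is tenths of a bp
+    calculate_sz_decimals(Decimal("0.001"))      # -> 3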
+""" + +import pytest +from decimal import Decimal +from hamcrest import assert_that, equal_to + +from x10.perpetual.orders import TimeInForce as X10TimeInForce + +from extended.utils.helpers import ( + normalize_market_name, + to_hyperliquid_market_name, + parse_order_type, + parse_builder, + calculate_sz_decimals, +) + + +class TestNormalizeMarketName: + """Tests for normalize_market_name function.""" + + def test_normalize_without_suffix(self): + """Test adding -USD suffix to bare coin name.""" + assert_that(normalize_market_name("BTC"), equal_to("BTC-USD")) + assert_that(normalize_market_name("ETH"), equal_to("ETH-USD")) + assert_that(normalize_market_name("SOL"), equal_to("SOL-USD")) + + def test_normalize_with_suffix(self): + """Test preserving existing -USD suffix.""" + assert_that(normalize_market_name("BTC-USD"), equal_to("BTC-USD")) + assert_that(normalize_market_name("ETH-USD"), equal_to("ETH-USD")) + + def test_normalize_special_coins(self): + """Test normalizing special coin names.""" + assert_that(normalize_market_name("1000PEPE"), equal_to("1000PEPE-USD")) + assert_that(normalize_market_name("1000SHIB"), equal_to("1000SHIB-USD")) + + +class TestToHyperliquidMarketName: + """Tests for to_hyperliquid_market_name function.""" + + def test_strip_usd_suffix(self): + """Test stripping -USD suffix.""" + assert_that(to_hyperliquid_market_name("BTC-USD"), equal_to("BTC")) + assert_that(to_hyperliquid_market_name("ETH-USD"), equal_to("ETH")) + assert_that(to_hyperliquid_market_name("SOL-USD"), equal_to("SOL")) + + def test_already_stripped(self): + """Test names without -USD suffix are unchanged.""" + assert_that(to_hyperliquid_market_name("BTC"), equal_to("BTC")) + assert_that(to_hyperliquid_market_name("ETH"), equal_to("ETH")) + + def test_special_coins(self): + """Test stripping suffix from special coin names.""" + assert_that(to_hyperliquid_market_name("1000PEPE-USD"), equal_to("1000PEPE")) + assert_that(to_hyperliquid_market_name("1000SHIB-USD"), equal_to("1000SHIB")) + + +class TestParseOrderType: + """Tests for parse_order_type function.""" + + def test_parse_gtc_order_type(self): + """Test parsing GTC order type.""" + order_type = {"limit": {"tif": "Gtc"}} + tif, post_only = parse_order_type(order_type) + + assert_that(tif, equal_to(X10TimeInForce.GTT)) + assert_that(post_only, equal_to(False)) + + def test_parse_ioc_order_type(self): + """Test parsing IOC order type.""" + order_type = {"limit": {"tif": "Ioc"}} + tif, post_only = parse_order_type(order_type) + + assert_that(tif, equal_to(X10TimeInForce.IOC)) + assert_that(post_only, equal_to(False)) + + def test_parse_alo_order_type(self): + """Test parsing ALO (post-only) order type.""" + order_type = {"limit": {"tif": "Alo"}} + tif, post_only = parse_order_type(order_type) + + assert_that(tif, equal_to(X10TimeInForce.GTT)) + assert_that(post_only, equal_to(True)) + + def test_parse_order_type_default_tif(self): + """Test default TIF when not specified.""" + order_type = {"limit": {}} + tif, post_only = parse_order_type(order_type) + + assert_that(tif, equal_to(X10TimeInForce.GTT)) + assert_that(post_only, equal_to(False)) + + def test_parse_order_type_unknown_format(self): + """Test handling unknown order type format.""" + order_type = {"market": {}} + tif, post_only = parse_order_type(order_type) + + assert_that(tif, equal_to(X10TimeInForce.GTT)) + assert_that(post_only, equal_to(False)) + + +class TestParseBuilder: + """Tests for parse_builder function.""" + + def test_parse_builder_none(self): + """Test parsing None 
builder.""" + builder_id, builder_fee = parse_builder(None) + + assert_that(builder_id, equal_to(None)) + assert_that(builder_fee, equal_to(None)) + + def test_parse_builder_basic(self): + """Test parsing basic builder info.""" + builder = {"b": "123", "f": 10} # 1 bps = 0.0001 + builder_id, builder_fee = parse_builder(builder) + + assert_that(builder_id, equal_to(123)) + assert_that(builder_fee, equal_to(Decimal("0.0001"))) + + def test_parse_builder_fee_conversion(self): + """Test fee conversion from tenths of bps to decimal.""" + # f=1 -> 0.1 bps -> 0.000001 + builder1 = {"b": "1", "f": 1} + _, fee1 = parse_builder(builder1) + assert_that(fee1, equal_to(Decimal("0.00001"))) + + # f=10 -> 1 bps -> 0.0001 + builder2 = {"b": "1", "f": 10} + _, fee2 = parse_builder(builder2) + assert_that(fee2, equal_to(Decimal("0.0001"))) + + # f=50 -> 5 bps -> 0.0005 + builder3 = {"b": "1", "f": 50} + _, fee3 = parse_builder(builder3) + assert_that(fee3, equal_to(Decimal("0.0005"))) + + def test_parse_builder_no_fee(self): + """Test parsing builder without fee.""" + builder = {"b": "456"} + builder_id, builder_fee = parse_builder(builder) + + assert_that(builder_id, equal_to(456)) + assert_that(builder_fee, equal_to(Decimal("0"))) + + +class TestCalculateSzDecimals: + """Tests for calculate_sz_decimals function.""" + + def test_calculate_sz_decimals_five_places(self): + """Test calculating 5 decimal places.""" + result = calculate_sz_decimals(Decimal("0.00001")) + assert_that(result, equal_to(5)) + + def test_calculate_sz_decimals_three_places(self): + """Test calculating 3 decimal places.""" + result = calculate_sz_decimals(Decimal("0.001")) + assert_that(result, equal_to(3)) + + def test_calculate_sz_decimals_one_place(self): + """Test calculating 1 decimal place.""" + result = calculate_sz_decimals(Decimal("0.1")) + assert_that(result, equal_to(1)) + + def test_calculate_sz_decimals_zero_places(self): + """Test calculating 0 decimal places.""" + result = calculate_sz_decimals(Decimal("1")) + assert_that(result, equal_to(0)) + + def test_calculate_sz_decimals_zero_input(self): + """Test handling zero input.""" + result = calculate_sz_decimals(Decimal("0")) + assert_that(result, equal_to(0)) + + def test_calculate_sz_decimals_negative_input(self): + """Test handling negative input.""" + result = calculate_sz_decimals(Decimal("-0.001")) + assert_that(result, equal_to(0)) diff --git a/tests/extended/test_market_transformer.py b/tests/extended/test_market_transformer.py new file mode 100644 index 0000000..8ce5bcf --- /dev/null +++ b/tests/extended/test_market_transformer.py @@ -0,0 +1,189 @@ +""" +Unit tests for MarketTransformer. + +Tests transformation of Extended market data to Hyperliquid format. 
+""" + +import pytest +from decimal import Decimal +from hamcrest import assert_that, equal_to, has_key, has_length, has_entries, contains_inanyorder + +from extended.transformers.market import MarketTransformer +from tests.extended.fixtures import ( + create_mock_market, + create_mock_markets, + create_mock_orderbook, + create_mock_candles, +) + + +class TestMarketTransformerMeta: + """Tests for transform_meta method.""" + + def test_transform_meta_single_market(self): + """Test transforming a single market to meta format.""" + market = create_mock_market("BTC-USD") + result = MarketTransformer.transform_meta([market]) + + assert_that(result, has_key("universe")) + assert_that(result["universe"], has_length(1)) + + btc_entry = result["universe"][0] + assert_that(btc_entry["name"], equal_to("BTC")) # Stripped -USD + assert_that(btc_entry["szDecimals"], equal_to(5)) # From min_order_size_change + assert_that(btc_entry["maxLeverage"], equal_to(50)) + assert_that(btc_entry["onlyIsolated"], equal_to(False)) + + def test_transform_meta_multiple_markets(self): + """Test transforming multiple markets to meta format.""" + markets = create_mock_markets() + result = MarketTransformer.transform_meta(markets) + + assert_that(result, has_key("universe")) + assert_that(result["universe"], has_length(3)) + + names = [m["name"] for m in result["universe"]] + assert_that(names, contains_inanyorder("BTC", "ETH", "SOL")) + + def test_transform_meta_excludes_inactive_markets(self): + """Test that inactive markets are excluded from meta.""" + active_market = create_mock_market("BTC-USD", active=True) + inactive_market = create_mock_market("ETH-USD", active=False) + + result = MarketTransformer.transform_meta([active_market, inactive_market]) + + assert_that(result["universe"], has_length(1)) + assert_that(result["universe"][0]["name"], equal_to("BTC")) + + def test_transform_meta_empty_list(self): + """Test transforming empty markets list.""" + result = MarketTransformer.transform_meta([]) + + assert_that(result, equal_to({"universe": []})) + + +class TestMarketTransformerAllMids: + """Tests for transform_all_mids method.""" + + def test_transform_all_mids(self): + """Test transforming markets to mid prices dict.""" + markets = create_mock_markets() + result = MarketTransformer.transform_all_mids(markets) + + # Should have all markets + assert_that(len(result), equal_to(3)) + + # Check BTC mid price (bid=50490, ask=50510, mid=50500) + assert_that("BTC" in result, equal_to(True)) + assert_that(result["BTC"], equal_to("50500.00")) + + def test_transform_all_mids_market_name_conversion(self): + """Test that market names are converted to Hyperliquid format.""" + markets = create_mock_markets() + result = MarketTransformer.transform_all_mids(markets) + + # All names should be without -USD suffix + for name in result.keys(): + assert_that("-USD" in name, equal_to(False)) + + +class TestMarketTransformerL2Snapshot: + """Tests for transform_l2_snapshot method.""" + + def test_transform_l2_snapshot_structure(self): + """Test L2 snapshot has correct structure.""" + orderbook = create_mock_orderbook() + result = MarketTransformer.transform_l2_snapshot(orderbook) + + assert_that(result, has_key("coin")) + assert_that(result, has_key("levels")) + assert_that(result, has_key("time")) + + def test_transform_l2_snapshot_coin_name(self): + """Test coin name is converted to Hyperliquid format.""" + orderbook = create_mock_orderbook() + result = MarketTransformer.transform_l2_snapshot(orderbook) + + assert_that(result["coin"], 
equal_to("BTC")) + + def test_transform_l2_snapshot_levels(self): + """Test levels are correctly transformed.""" + orderbook = create_mock_orderbook() + result = MarketTransformer.transform_l2_snapshot(orderbook) + + # levels[0] = bids, levels[1] = asks + assert_that(len(result["levels"]), equal_to(2)) + + bids = result["levels"][0] + asks = result["levels"][1] + + assert_that(len(bids), equal_to(3)) + assert_that(len(asks), equal_to(3)) + + # Check first bid + assert_that(bids[0]["px"], equal_to("50490.00")) + assert_that(bids[0]["sz"], equal_to("1.5")) + assert_that(bids[0]["n"], equal_to(1)) + + # Check first ask + assert_that(asks[0]["px"], equal_to("50510.00")) + assert_that(asks[0]["sz"], equal_to("1.2")) + + def test_transform_l2_snapshot_custom_timestamp(self): + """Test custom timestamp is used when provided.""" + orderbook = create_mock_orderbook() + custom_ts = 1234567890000 + result = MarketTransformer.transform_l2_snapshot(orderbook, timestamp=custom_ts) + + assert_that(result["time"], equal_to(custom_ts)) + + +class TestMarketTransformerCandles: + """Tests for transform_candles method.""" + + def test_transform_candles_structure(self): + """Test candles have correct Hyperliquid structure.""" + candles = create_mock_candles() + result = MarketTransformer.transform_candles(candles, "BTC", "1m") + + assert_that(len(result), equal_to(3)) + + first_candle = result[0] + assert_that(first_candle, has_key("t")) # open timestamp + assert_that(first_candle, has_key("T")) # close timestamp + assert_that(first_candle, has_key("s")) # symbol + assert_that(first_candle, has_key("i")) # interval + assert_that(first_candle, has_key("o")) # open + assert_that(first_candle, has_key("c")) # close + assert_that(first_candle, has_key("h")) # high + assert_that(first_candle, has_key("l")) # low + assert_that(first_candle, has_key("v")) # volume + assert_that(first_candle, has_key("n")) # num trades + + def test_transform_candles_values(self): + """Test candle values are correctly transformed.""" + candles = create_mock_candles() + result = MarketTransformer.transform_candles(candles, "BTC", "1m") + + first_candle = result[0] + assert_that(first_candle["s"], equal_to("BTC")) + assert_that(first_candle["i"], equal_to("1m")) + assert_that(first_candle["o"], equal_to("50000.00")) + assert_that(first_candle["c"], equal_to("50050.00")) + assert_that(first_candle["h"], equal_to("50100.00")) + assert_that(first_candle["l"], equal_to("49900.00")) + assert_that(first_candle["v"], equal_to("100.5")) + + def test_transform_candles_close_timestamp(self): + """Test close timestamp is calculated correctly for 1m interval.""" + candles = create_mock_candles() + result = MarketTransformer.transform_candles(candles, "BTC", "1m") + + first_candle = result[0] + # Close timestamp = open + 60000ms (1 minute) + assert_that(first_candle["T"], equal_to(first_candle["t"] + 60000)) + + def test_transform_candles_empty_list(self): + """Test transforming empty candles list.""" + result = MarketTransformer.transform_candles([], "BTC", "1m") + assert_that(result, equal_to([])) diff --git a/tests/extended/test_order_transformer.py b/tests/extended/test_order_transformer.py new file mode 100644 index 0000000..566d57d --- /dev/null +++ b/tests/extended/test_order_transformer.py @@ -0,0 +1,281 @@ +""" +Unit tests for OrderTransformer. + +Tests transformation of Extended order/trade data to Hyperliquid format. 
+""" + +import pytest +from decimal import Decimal +from hamcrest import assert_that, equal_to, has_key, has_length + +from extended.transformers.order import OrderTransformer +from tests.extended.fixtures import ( + create_mock_open_order, + create_mock_open_orders, + create_mock_placed_order, + create_mock_trade, + create_mock_trades, +) + + +class TestOrderTransformerOpenOrders: + """Tests for transform_open_orders method.""" + + def test_transform_open_orders_list(self): + """Test transforming a list of open orders.""" + orders = create_mock_open_orders() + result = OrderTransformer.transform_open_orders(orders) + + assert_that(len(result), equal_to(2)) + + def test_transform_open_orders_empty_list(self): + """Test transforming empty orders list.""" + result = OrderTransformer.transform_open_orders([]) + assert_that(result, equal_to([])) + + +class TestOrderTransformerOpenOrder: + """Tests for transform_open_order method.""" + + def test_transform_open_order_structure(self): + """Test open order has correct Hyperliquid structure.""" + order = create_mock_open_order() + result = OrderTransformer.transform_open_order(order) + + assert_that(result, has_key("coin")) + assert_that(result, has_key("side")) + assert_that(result, has_key("limitPx")) + assert_that(result, has_key("sz")) + assert_that(result, has_key("oid")) + assert_that(result, has_key("timestamp")) + assert_that(result, has_key("origSz")) + assert_that(result, has_key("cloid")) + + def test_transform_open_order_values(self): + """Test open order values are correctly transformed.""" + order = create_mock_open_order( + order_id=12345, + market="BTC-USD", + side="BUY", + price=Decimal("50000.00"), + qty=Decimal("0.1"), + external_id="test-order-001", + ) + result = OrderTransformer.transform_open_order(order) + + assert_that(result["coin"], equal_to("BTC")) # Converted from BTC-USD + assert_that(result["side"], equal_to("B")) # BUY -> B + assert_that(result["limitPx"], equal_to("50000.00")) + assert_that(result["sz"], equal_to("0.1")) + assert_that(result["oid"], equal_to(12345)) + assert_that(result["origSz"], equal_to("0.1")) + assert_that(result["cloid"], equal_to("test-order-001")) + + def test_transform_open_order_sell_side(self): + """Test SELL order is transformed to 'A'.""" + order = create_mock_open_order(side="SELL") + result = OrderTransformer.transform_open_order(order) + + assert_that(result["side"], equal_to("A")) + + def test_transform_open_order_remaining_size(self): + """Test remaining size is calculated correctly.""" + order = create_mock_open_order( + qty=Decimal("1.0"), + filled_qty=Decimal("0.3"), + ) + result = OrderTransformer.transform_open_order(order) + + assert_that(result["sz"], equal_to("0.7")) # 1.0 - 0.3 + assert_that(result["origSz"], equal_to("1.0")) + + def test_transform_open_order_empty_external_id(self): + """Test order with empty external_id has None cloid.""" + order = create_mock_open_order(external_id="") + result = OrderTransformer.transform_open_order(order) + + # Empty string should be converted to None + assert_that(result["cloid"], equal_to(None)) + + +class TestOrderTransformerUserFills: + """Tests for transform_user_fills method.""" + + def test_transform_user_fills_list(self): + """Test transforming a list of fills.""" + trades = create_mock_trades() + result = OrderTransformer.transform_user_fills(trades) + + assert_that(len(result), equal_to(2)) + + def test_transform_user_fills_empty_list(self): + """Test transforming empty trades list.""" + result = 
OrderTransformer.transform_user_fills([]) + assert_that(result, equal_to([])) + + +class TestOrderTransformerFill: + """Tests for transform_fill method.""" + + def test_transform_fill_structure(self): + """Test fill has correct Hyperliquid structure.""" + trade = create_mock_trade() + result = OrderTransformer.transform_fill(trade) + + assert_that(result, has_key("coin")) + assert_that(result, has_key("px")) + assert_that(result, has_key("sz")) + assert_that(result, has_key("side")) + assert_that(result, has_key("time")) + assert_that(result, has_key("startPosition")) + assert_that(result, has_key("dir")) + assert_that(result, has_key("closedPnl")) + assert_that(result, has_key("hash")) + assert_that(result, has_key("oid")) + assert_that(result, has_key("crossed")) + assert_that(result, has_key("fee")) + assert_that(result, has_key("tid")) + assert_that(result, has_key("liquidation")) + assert_that(result, has_key("cloid")) + + def test_transform_fill_values(self): + """Test fill values are correctly transformed.""" + trade = create_mock_trade( + trade_id=98765, + order_id=12345, + market="BTC-USD", + side="BUY", + price=Decimal("50000.00"), + qty=Decimal("0.1"), + fee=Decimal("2.50"), + is_taker=True, + ) + result = OrderTransformer.transform_fill(trade) + + assert_that(result["coin"], equal_to("BTC")) + assert_that(result["px"], equal_to("50000.00")) + assert_that(result["sz"], equal_to("0.1")) + assert_that(result["side"], equal_to("B")) + assert_that(result["oid"], equal_to(12345)) + assert_that(result["fee"], equal_to("2.50")) + assert_that(result["tid"], equal_to(98765)) + assert_that(result["hash"], equal_to("98765")) + assert_that(result["crossed"], equal_to(True)) # is_taker + + def test_transform_fill_sell_side(self): + """Test SELL fill is transformed to 'A'.""" + trade = create_mock_trade(side="SELL") + result = OrderTransformer.transform_fill(trade) + + assert_that(result["side"], equal_to("A")) + + def test_transform_fill_cloid_is_none(self): + """Test cloid is always None (not available from Extended).""" + trade = create_mock_trade() + result = OrderTransformer.transform_fill(trade) + + assert_that(result["cloid"], equal_to(None)) + + def test_transform_fill_not_liquidation(self): + """Test normal trade is not a liquidation.""" + trade = create_mock_trade() + result = OrderTransformer.transform_fill(trade) + + assert_that(result["liquidation"], equal_to(False)) + + +class TestOrderTransformerOrderResponse: + """Tests for transform_order_response method.""" + + def test_transform_order_response_structure(self): + """Test order response has correct Hyperliquid structure.""" + placed_order = create_mock_placed_order(order_id=12345, external_id="test-001") + result = OrderTransformer.transform_order_response(placed_order) + + assert_that(result["status"], equal_to("ok")) + assert_that(result["response"]["type"], equal_to("order")) + assert_that(result["response"]["data"], has_key("statuses")) + + def test_transform_order_response_values(self): + """Test order response values.""" + placed_order = create_mock_placed_order(order_id=12345, external_id="test-001") + result = OrderTransformer.transform_order_response(placed_order) + + statuses = result["response"]["data"]["statuses"] + assert_that(len(statuses), equal_to(1)) + assert_that(statuses[0]["resting"]["oid"], equal_to(12345)) + assert_that(statuses[0]["resting"]["cloid"], equal_to("test-001")) + + +class TestOrderTransformerCancelResponse: + """Tests for transform_cancel_response method.""" + + def 
test_transform_cancel_response_success(self): + """Test successful cancel response.""" + result = OrderTransformer.transform_cancel_response(success=True) + + assert_that(result["status"], equal_to("ok")) + assert_that(result["response"]["type"], equal_to("cancel")) + assert_that(result["response"]["data"]["statuses"][0], equal_to("success")) + + def test_transform_cancel_response_failure(self): + """Test failed cancel response.""" + result = OrderTransformer.transform_cancel_response(success=False) + + assert_that(result["status"], equal_to("err")) + assert_that(result["response"], equal_to("Cancel failed")) + + +class TestOrderTransformerErrorResponse: + """Tests for transform_error_response method.""" + + def test_transform_error_response(self): + """Test error response formatting.""" + result = OrderTransformer.transform_error_response("Order rejected: insufficient margin") + + assert_that(result["status"], equal_to("err")) + assert_that(result["response"], equal_to("Order rejected: insufficient margin")) + + +class TestOrderTransformerBulkOrdersResponse: + """Tests for transform_bulk_orders_response method.""" + + def test_transform_bulk_orders_response_all_success(self): + """Test bulk orders response with all successes.""" + results = [ + {"status": "ok", "data": {"id": 12345, "external_id": "order-1"}}, + {"status": "ok", "data": {"id": 12346, "external_id": "order-2"}}, + ] + result = OrderTransformer.transform_bulk_orders_response(results) + + assert_that(result["status"], equal_to("ok")) + assert_that(result["response"]["type"], equal_to("order")) + + statuses = result["response"]["data"]["statuses"] + assert_that(len(statuses), equal_to(2)) + assert_that(statuses[0]["resting"]["oid"], equal_to(12345)) + assert_that(statuses[1]["resting"]["oid"], equal_to(12346)) + + def test_transform_bulk_orders_response_mixed(self): + """Test bulk orders response with mixed results.""" + results = [ + {"status": "ok", "data": {"id": 12345, "external_id": "order-1"}}, + {"status": "err", "error": "Insufficient margin"}, + ] + result = OrderTransformer.transform_bulk_orders_response(results) + + statuses = result["response"]["data"]["statuses"] + assert_that(len(statuses), equal_to(2)) + assert_that(statuses[0]["resting"]["oid"], equal_to(12345)) + assert_that(statuses[1]["error"], equal_to("Insufficient margin")) + + +class TestOrderTransformerLeverageResponse: + """Tests for transform_leverage_response method.""" + + def test_transform_leverage_response(self): + """Test leverage response formatting.""" + result = OrderTransformer.transform_leverage_response() + + assert_that(result["status"], equal_to("ok")) + assert_that(result["response"]["type"], equal_to("leverage")) diff --git a/tests/extended/test_types.py b/tests/extended/test_types.py new file mode 100644 index 0000000..d17e4b4 --- /dev/null +++ b/tests/extended/test_types.py @@ -0,0 +1,192 @@ +""" +Unit tests for Extended SDK type definitions. + +Tests type enums and dataclasses in extended.types. 
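The response-transformer tests all assert the same Hyperliquid envelope. A sketch of the two shapes they pin down (hypothetical helpers, grounded only in the assertions above):

from typing import Optional

def order_response_sketch(oid: int, cloid: Optional[str]) -> dict:
    # Successful placement: one "resting" status per order.
    return {
        "status": "ok",
        "response": {
            "type": "order",
            "data": {"statuses": [{"resting": {"oid": oid, "cloid": cloid}}]},
        },
    }

def cancel_response_sketch(success: bool) -> dict:
    # Failure collapses to a bare error string, per the tests above.
    if success:
        return {"status": "ok", "response": {"type": "cancel", "data": {"statuses": ["success"]}}}
    return {"status": "err", "response": "Cancel failed"}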
+""" + +import pytest +from decimal import Decimal +from hamcrest import assert_that, equal_to + +from x10.perpetual.orders import OrderSide, TimeInForce as X10TimeInForce +from x10.perpetual.positions import PositionSide + +from extended.types import ( + Side, + TimeInForce, + LimitOrderType, + OrderTypeSpec, + BuilderInfo, +) + + +class TestSideEnum: + """Tests for Side enum.""" + + def test_side_values(self): + """Test Side enum values.""" + assert_that(Side.BUY.value, equal_to("B")) + assert_that(Side.SELL.value, equal_to("A")) + + def test_from_is_buy_true(self): + """Test converting is_buy=True to BUY.""" + assert_that(Side.from_is_buy(True), equal_to(Side.BUY)) + + def test_from_is_buy_false(self): + """Test converting is_buy=False to SELL.""" + assert_that(Side.from_is_buy(False), equal_to(Side.SELL)) + + def test_from_x10_side_buy(self): + """Test converting x10 BUY side.""" + assert_that(Side.from_x10_side(OrderSide.BUY), equal_to(Side.BUY)) + assert_that(Side.from_x10_side("BUY"), equal_to(Side.BUY)) + assert_that(Side.from_x10_side("buy"), equal_to(Side.BUY)) + + def test_from_x10_side_sell(self): + """Test converting x10 SELL side.""" + assert_that(Side.from_x10_side(OrderSide.SELL), equal_to(Side.SELL)) + assert_that(Side.from_x10_side("SELL"), equal_to(Side.SELL)) + assert_that(Side.from_x10_side("sell"), equal_to(Side.SELL)) + + def test_from_x10_side_long(self): + """Test converting LONG position side to BUY.""" + assert_that(Side.from_x10_side(PositionSide.LONG), equal_to(Side.BUY)) + assert_that(Side.from_x10_side("LONG"), equal_to(Side.BUY)) + + def test_from_x10_side_short(self): + """Test converting SHORT position side to SELL.""" + assert_that(Side.from_x10_side(PositionSide.SHORT), equal_to(Side.SELL)) + assert_that(Side.from_x10_side("SHORT"), equal_to(Side.SELL)) + + def test_to_is_buy(self): + """Test converting Side to is_buy boolean.""" + assert_that(Side.BUY.to_is_buy(), equal_to(True)) + assert_that(Side.SELL.to_is_buy(), equal_to(False)) + + +class TestTimeInForceEnum: + """Tests for TimeInForce enum.""" + + def test_tif_values(self): + """Test TimeInForce enum values.""" + assert_that(TimeInForce.GTC.value, equal_to("Gtc")) + assert_that(TimeInForce.IOC.value, equal_to("Ioc")) + assert_that(TimeInForce.ALO.value, equal_to("Alo")) + + def test_to_x10_tif_gtc(self): + """Test converting GTC to x10 GTT.""" + assert_that(TimeInForce.GTC.to_x10_tif(), equal_to(X10TimeInForce.GTT)) + + def test_to_x10_tif_ioc(self): + """Test converting IOC to x10 IOC.""" + assert_that(TimeInForce.IOC.to_x10_tif(), equal_to(X10TimeInForce.IOC)) + + def test_to_x10_tif_alo(self): + """Test converting ALO to x10 GTT (ALO uses GTT with post_only).""" + assert_that(TimeInForce.ALO.to_x10_tif(), equal_to(X10TimeInForce.GTT)) + + def test_is_post_only(self): + """Test is_post_only property.""" + assert_that(TimeInForce.GTC.is_post_only, equal_to(False)) + assert_that(TimeInForce.IOC.is_post_only, equal_to(False)) + assert_that(TimeInForce.ALO.is_post_only, equal_to(True)) + + +class TestOrderTypeSpec: + """Tests for OrderTypeSpec dataclass.""" + + def test_from_dict_gtc(self): + """Test creating GTC order type from dict.""" + data = {"limit": {"tif": "Gtc"}} + spec = OrderTypeSpec.from_dict(data) + + assert_that(spec.limit.tif, equal_to(TimeInForce.GTC)) + + def test_from_dict_ioc(self): + """Test creating IOC order type from dict.""" + data = {"limit": {"tif": "Ioc"}} + spec = OrderTypeSpec.from_dict(data) + + assert_that(spec.limit.tif, equal_to(TimeInForce.IOC)) + + def 
test_from_dict_alo(self): + """Test creating ALO order type from dict.""" + data = {"limit": {"tif": "Alo"}} + spec = OrderTypeSpec.from_dict(data) + + assert_that(spec.limit.tif, equal_to(TimeInForce.ALO)) + + def test_from_dict_default_tif(self): + """Test default TIF when not specified.""" + data = {"limit": {}} + spec = OrderTypeSpec.from_dict(data) + + assert_that(spec.limit.tif, equal_to(TimeInForce.GTC)) + + def test_from_dict_no_limit_key(self): + """Test default when no limit key.""" + data = {} + spec = OrderTypeSpec.from_dict(data) + + assert_that(spec.limit.tif, equal_to(TimeInForce.GTC)) + + def test_to_dict(self): + """Test converting OrderTypeSpec to dict.""" + spec = OrderTypeSpec(limit=LimitOrderType(tif=TimeInForce.IOC)) + result = spec.to_dict() + + assert_that(result, equal_to({"limit": {"tif": "Ioc"}})) + + +class TestBuilderInfo: + """Tests for BuilderInfo dataclass.""" + + def test_from_dict_none(self): + """Test from_dict with None input.""" + result = BuilderInfo.from_dict(None) + assert_that(result, equal_to(None)) + + def test_from_dict_valid(self): + """Test from_dict with valid input.""" + data = {"b": "123", "f": 10} + info = BuilderInfo.from_dict(data) + + assert_that(info.b, equal_to("123")) + assert_that(info.f, equal_to(10)) + + def test_from_dict_numeric_builder_id(self): + """Test from_dict converts numeric builder_id to string.""" + data = {"b": 456, "f": 20} + info = BuilderInfo.from_dict(data) + + assert_that(info.b, equal_to("456")) + + def test_to_dict(self): + """Test converting BuilderInfo to dict.""" + info = BuilderInfo(b="789", f=50) + result = info.to_dict() + + assert_that(result, equal_to({"b": "789", "f": 50})) + + def test_builder_id_property(self): + """Test builder_id property returns int.""" + info = BuilderInfo(b="123", f=10) + assert_that(info.builder_id, equal_to(123)) + + def test_fee_decimal_conversion(self): + """Test fee_decimal property converts correctly.""" + # f=1 -> 0.1 bps -> 0.00001 + info1 = BuilderInfo(b="1", f=1) + assert_that(info1.fee_decimal, equal_to(Decimal("0.00001"))) + + # f=10 -> 1 bps -> 0.0001 + info2 = BuilderInfo(b="1", f=10) + assert_that(info2.fee_decimal, equal_to(Decimal("0.0001"))) + + # f=50 -> 5 bps -> 0.0005 + info3 = BuilderInfo(b="1", f=50) + assert_that(info3.fee_decimal, equal_to(Decimal("0.0005"))) + + # f=100 -> 10 bps -> 0.001 + info4 = BuilderInfo(b="1", f=100) + assert_that(info4.fee_decimal, equal_to(Decimal("0.001"))) diff --git a/tests/extended/utils/test_thread_safety.py b/tests/extended/utils/test_thread_safety.py new file mode 100644 index 0000000..af8a626 --- /dev/null +++ b/tests/extended/utils/test_thread_safety.py @@ -0,0 +1,782 @@ +""" +Comprehensive test suite for thread-safe async operations. 
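The enum tests reduce to a compact contract: `Side` serializes to Hyperliquid's "B"/"A" markers, and `TimeInForce.ALO` is the only post-only value. A sketch consistent with those assertions; the real `extended.types` additionally accepts x10 `OrderSide`/`PositionSide` inputs:

from enum import Enum

class SideSketch(str, Enum):
    BUY = "B"   # Hyperliquid bid marker
    SELL = "A"  # Hyperliquid ask marker

    @classmethod
    def from_is_buy(cls, is_buy: bool) -> "SideSketch":
        return cls.BUY if is_buy else cls.SELL

    def to_is_buy(self) -> bool:
        return self is SideSketch.BUY

class TimeInForceSketch(str, Enum):
    GTC = "Gtc"
    IOC = "Ioc"
    ALO = "Alo"

    @property
    def is_post_only(self) -> bool:
        # ALO maps to x10 GTT plus post_only, per the to_x10_tif tests.
        return self is TimeInForceSketch.ALO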
+ +Tests the fixes for: +- RuntimeError: Task got Future attached to a different loop +- ThreadPoolExecutor contexts +- Celery worker contexts (simulated) +- Nested event loops +- Multiple concurrent threads +""" + +import asyncio +import threading +import time +from concurrent.futures import ThreadPoolExecutor, as_completed +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from extended.utils.async_helpers import ( + get_current_loop, + thread_safe_create_task, + thread_safe_gather, + thread_safe_wait, + thread_safe_wait_for, +) +from extended.utils.helpers import run_sync + + +class TestRunSync: + """Tests for the thread-safe run_sync function.""" + + def test_basic_sync_call(self): + """Test basic synchronous execution of async coroutine.""" + + async def simple_async(): + return "hello" + + result = run_sync(simple_async()) + assert result == "hello" + + def test_async_with_await(self): + """Test async function with internal await.""" + + async def async_with_sleep(): + await asyncio.sleep(0.01) + return "slept" + + result = run_sync(async_with_sleep()) + assert result == "slept" + + def test_nested_run_sync(self): + """Test nested run_sync calls (with nest_asyncio).""" + + async def inner(): + return "inner_result" + + async def outer(): + # This would fail without nest_asyncio + return run_sync(inner()) + + result = run_sync(outer()) + assert result == "inner_result" + + def test_deeply_nested_run_sync(self): + """Test deeply nested run_sync calls.""" + + async def level3(): + return "level3" + + async def level2(): + return run_sync(level3()) + + async def level1(): + return run_sync(level2()) + + result = run_sync(level1()) + assert result == "level3" + + def test_run_sync_preserves_exceptions(self): + """Test that exceptions propagate correctly.""" + + async def raising_async(): + raise ValueError("test error") + + with pytest.raises(ValueError, match="test error"): + run_sync(raising_async()) + + def test_run_sync_with_return_value_types(self): + """Test various return value types.""" + + async def return_dict(): + return {"key": "value"} + + async def return_list(): + return [1, 2, 3] + + async def return_none(): + return None + + assert run_sync(return_dict()) == {"key": "value"} + assert run_sync(return_list()) == [1, 2, 3] + assert run_sync(return_none()) is None + + +class TestRunSyncThreadPoolExecutor: + """Tests for run_sync in ThreadPoolExecutor contexts.""" + + def test_single_thread_executor(self): + """Test run_sync in single-threaded executor.""" + + def worker(): + async def async_work(): + await asyncio.sleep(0.01) + return "worker_done" + + return run_sync(async_work()) + + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(worker) + result = future.result(timeout=5) + + assert result == "worker_done" + + def test_multi_thread_executor_parallel(self): + """Test multiple parallel calls in ThreadPoolExecutor.""" + + def worker(n): + async def async_work(): + await asyncio.sleep(0.01) + return f"worker_{n}" + + return run_sync(async_work()) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(worker, i) for i in range(5)] + results = [f.result(timeout=10) for f in futures] + + assert len(results) == 5 + assert set(results) == {f"worker_{i}" for i in range(5)} + + def test_high_concurrency_executor(self): + """Test with high concurrency to stress test loop isolation.""" + + def worker(n): + async def async_work(): + await asyncio.sleep(0.001) + return n + + return run_sync(async_work()) + + 
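# run_sync is invoked here from bare pool threads with no running event
# loop; the helper is expected to create (or reuse) one loop per thread,
# which TestGetCurrentLoop below verifies directly.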
with ThreadPoolExecutor(max_workers=20) as executor: + futures = [executor.submit(worker, i) for i in range(100)] + results = [f.result(timeout=30) for f in futures] + + assert len(results) == 100 + assert set(results) == set(range(100)) + + def test_executor_with_nested_run_sync(self): + """Test nested run_sync inside ThreadPoolExecutor.""" + + def worker(n): + async def inner(): + await asyncio.sleep(0.01) + return f"inner_{n}" + + async def outer(): + return run_sync(inner()) + + return run_sync(outer()) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(worker, i) for i in range(5)] + results = [f.result(timeout=10) for f in futures] + + assert results == [f"inner_{i}" for i in range(5)] + + def test_executor_exception_handling(self): + """Test exception handling in ThreadPoolExecutor context.""" + + def worker(should_fail): + async def async_work(): + if should_fail: + raise ValueError("intentional failure") + return "success" + + return run_sync(async_work()) + + with ThreadPoolExecutor(max_workers=2) as executor: + future_success = executor.submit(worker, False) + future_failure = executor.submit(worker, True) + + assert future_success.result(timeout=5) == "success" + + with pytest.raises(ValueError, match="intentional failure"): + future_failure.result(timeout=5) + + +class TestThreadSafeGather: + """Tests for thread_safe_gather function.""" + + @pytest.mark.asyncio + async def test_basic_gather(self): + """Test basic gather functionality.""" + + async def task1(): + await asyncio.sleep(0.01) + return "result1" + + async def task2(): + await asyncio.sleep(0.01) + return "result2" + + results = await thread_safe_gather(task1(), task2()) + assert results == ["result1", "result2"] + + @pytest.mark.asyncio + async def test_gather_empty(self): + """Test gather with no arguments.""" + results = await thread_safe_gather() + assert results == [] + + @pytest.mark.asyncio + async def test_gather_single_task(self): + """Test gather with single task.""" + + async def single(): + return "single" + + results = await thread_safe_gather(single()) + assert results == ["single"] + + @pytest.mark.asyncio + async def test_gather_with_exceptions_returned(self): + """Test gather with return_exceptions=True.""" + + async def success(): + return "success" + + async def failure(): + raise ValueError("test error") + + results = await thread_safe_gather( + success(), failure(), return_exceptions=True + ) + + assert results[0] == "success" + assert isinstance(results[1], ValueError) + assert str(results[1]) == "test error" + + @pytest.mark.asyncio + async def test_gather_with_exceptions_raised(self): + """Test gather with return_exceptions=False (default).""" + + async def success(): + return "success" + + async def failure(): + raise ValueError("test error") + + with pytest.raises(ValueError, match="test error"): + await thread_safe_gather(success(), failure()) + + @pytest.mark.asyncio + async def test_gather_preserves_order(self): + """Test that results are in the same order as inputs.""" + + async def delayed(n, delay): + await asyncio.sleep(delay) + return n + + # Tasks with different delays + results = await thread_safe_gather( + delayed(1, 0.03), + delayed(2, 0.01), + delayed(3, 0.02), + ) + + # Results should be in input order, not completion order + assert results == [1, 2, 3] + + def test_gather_in_threadpool_executor(self): + """Test thread_safe_gather inside ThreadPoolExecutor.""" + + def worker(n): + async def async_work(): + async def task1(): + await 
asyncio.sleep(0.01) + return f"task1_{n}" + + async def task2(): + await asyncio.sleep(0.01) + return f"task2_{n}" + + return await thread_safe_gather(task1(), task2()) + + return run_sync(async_work()) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(worker, i) for i in range(5)] + results = [f.result(timeout=10) for f in futures] + + for i, result in enumerate(results): + assert result == [f"task1_{i}", f"task2_{i}"] + + +class TestThreadSafeWaitFor: + """Tests for thread_safe_wait_for function.""" + + @pytest.mark.asyncio + async def test_basic_wait_for(self): + """Test basic wait_for functionality.""" + + async def slow_task(): + await asyncio.sleep(0.01) + return "done" + + result = await thread_safe_wait_for(slow_task(), timeout=5.0) + assert result == "done" + + @pytest.mark.asyncio + async def test_wait_for_timeout(self): + """Test wait_for with timeout exceeded.""" + + async def very_slow_task(): + await asyncio.sleep(10) + return "done" + + with pytest.raises(asyncio.TimeoutError): + await thread_safe_wait_for(very_slow_task(), timeout=0.01) + + @pytest.mark.asyncio + async def test_wait_for_no_timeout(self): + """Test wait_for without timeout.""" + + async def task(): + await asyncio.sleep(0.01) + return "no_timeout" + + result = await thread_safe_wait_for(task(), timeout=None) + assert result == "no_timeout" + + +class TestThreadSafeCreateTask: + """Tests for thread_safe_create_task function.""" + + @pytest.mark.asyncio + async def test_create_task_basic(self): + """Test basic task creation.""" + + async def task_coro(): + await asyncio.sleep(0.01) + return "task_result" + + task = thread_safe_create_task(task_coro()) + result = await task + assert result == "task_result" + + @pytest.mark.asyncio + async def test_create_task_with_name(self): + """Test task creation with name.""" + + async def named_coro(): + return "named" + + task = thread_safe_create_task(named_coro(), name="my_task") + assert task.get_name() == "my_task" + result = await task + assert result == "named" + + +class TestThreadSafeWait: + """Tests for thread_safe_wait function.""" + + @pytest.mark.asyncio + async def test_wait_all_completed(self): + """Test wait with ALL_COMPLETED.""" + + async def task(n): + await asyncio.sleep(0.01 * n) + return n + + tasks = {task(1), task(2), task(3)} + done, pending = await thread_safe_wait(tasks) + + assert len(done) == 3 + assert len(pending) == 0 + + @pytest.mark.asyncio + async def test_wait_first_completed(self): + """Test wait with FIRST_COMPLETED.""" + + async def fast_task(): + await asyncio.sleep(0.01) + return "fast" + + async def slow_task(): + await asyncio.sleep(1) + return "slow" + + tasks = {fast_task(), slow_task()} + done, pending = await thread_safe_wait( + tasks, return_when=asyncio.FIRST_COMPLETED + ) + + assert len(done) == 1 + assert len(pending) == 1 + + # Cancel pending tasks + for task in pending: + task.cancel() + + +class TestGetCurrentLoop: + """Tests for get_current_loop function.""" + + def test_main_thread_loop(self): + """Test getting loop in main thread.""" + loop = get_current_loop() + assert loop is not None + assert isinstance(loop, asyncio.AbstractEventLoop) + assert not loop.is_closed() + + def test_worker_thread_loop_isolation(self): + """Test that worker threads get isolated loops.""" + + def get_loop_info(): + loop = get_current_loop() + return { + "loop_id": id(loop), + "thread_id": threading.get_ident(), + "is_closed": loop.is_closed(), + } + + with ThreadPoolExecutor(max_workers=3) as executor: 
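# Each pool thread should lazily create its own event loop on first use;
# the assertions below require three distinct threads and three distinct loops.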
+ futures = [executor.submit(get_loop_info) for _ in range(3)] + results = [f.result() for f in futures] + + # All loops should be open + for r in results: + assert not r["is_closed"] + + # All threads should be different + thread_ids = [r["thread_id"] for r in results] + assert len(set(thread_ids)) == 3 + + # All loops should be different + loop_ids = [r["loop_id"] for r in results] + assert len(set(loop_ids)) == 3 + + def test_same_thread_same_loop(self): + """Test that same thread gets same loop on repeated calls.""" + + def get_loops(): + loop1 = get_current_loop() + loop2 = get_current_loop() + loop3 = get_current_loop() + return [id(loop1), id(loop2), id(loop3)] + + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(get_loops) + loop_ids = future.result() + + # All should be the same loop + assert len(set(loop_ids)) == 1 + + +class TestStrategySimulation: + """ + Tests simulating the actual strategy execution pattern + that was causing the original errors. + """ + + def test_cross_exchange_data_acquisition_pattern(self): + """ + Simulate _acquire_cross_exchange_data from the trading strategy. + + This is the exact pattern that was failing with: + RuntimeError: Task got Future attached to a different loop + """ + + def simulate_api_call(method_name, *args): + """Simulate Extended SDK API calls.""" + + async def async_api(): + # Simulate network delay + await asyncio.sleep(0.01) + + if method_name == "user_state": + # Simulate user_state which uses asyncio.gather internally + async def get_balance(): + await asyncio.sleep(0.005) + return {"balance": "1000"} + + async def get_positions(): + await asyncio.sleep(0.005) + return [{"symbol": "BTC", "size": "0.1"}] + + balance, positions = await thread_safe_gather( + get_balance(), get_positions() + ) + return { + "assetPositions": positions, + "marginSummary": {"accountValue": balance["balance"]}, + } + + elif method_name == "meta": + return {"universe": [{"name": "BTC", "szDecimals": 5}]} + + elif method_name == "all_mids": + return {"BTC": "50000", "ETH": "3000"} + + else: + raise ValueError(f"Unknown method: {method_name}") + + return run_sync(async_api()) + + # Simulate strategy's ThreadPoolExecutor pattern + with ThreadPoolExecutor(max_workers=10, thread_name_prefix="CrossEx") as executor: + futures = {} + futures[executor.submit(simulate_api_call, "user_state")] = ( + "user_state", + "client", + ) + futures[executor.submit(simulate_api_call, "meta")] = ("meta", "client") + futures[executor.submit(simulate_api_call, "all_mids")] = ( + "all_mids", + "client", + ) + + results = {} + failed_sources = [] + + for future in as_completed(futures): + key, source = futures[future] + try: + timeout = 20 if key in ["user_state", "all_mids", "meta"] else 15 + result = future.result(timeout=timeout) + results[key] = result + except Exception as e: + failed_sources.append((key, str(e))) + + # Verify no failures + assert len(failed_sources) == 0, f"Failed sources: {failed_sources}" + + # Verify all results + assert "user_state" in results + assert "meta" in results + assert "all_mids" in results + + assert "assetPositions" in results["user_state"] + assert "universe" in results["meta"] + assert "BTC" in results["all_mids"] + + def test_multiple_strategy_iterations(self): + """ + Test multiple iterations of the strategy pattern + to ensure no accumulated state issues. 
+ """ + + def single_iteration(iteration): + def api_call(n): + async def work(): + await asyncio.sleep(0.01) + return f"iter_{iteration}_call_{n}" + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=3) as executor: + futures = [executor.submit(api_call, i) for i in range(3)] + return [f.result(timeout=5) for f in futures] + + # Run multiple iterations + for i in range(10): + results = single_iteration(i) + assert len(results) == 3 + assert all(f"iter_{i}" in r for r in results) + + def test_mixed_sync_and_async_api_calls(self): + """Test mixing sync and async API patterns.""" + + def sync_api_call(): + return "sync_result" + + def async_api_call(): + async def work(): + await asyncio.sleep(0.01) + return "async_result" + + return run_sync(work()) + + def gather_api_call(): + async def work(): + async def t1(): + return "gather_1" + + async def t2(): + return "gather_2" + + return await thread_safe_gather(t1(), t2()) + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = { + executor.submit(sync_api_call): "sync", + executor.submit(async_api_call): "async", + executor.submit(gather_api_call): "gather", + } + + results = {} + for future in as_completed(futures): + key = futures[future] + results[key] = future.result(timeout=5) + + assert results["sync"] == "sync_result" + assert results["async"] == "async_result" + assert results["gather"] == ["gather_1", "gather_2"] + + +class TestEdgeCases: + """Tests for edge cases and potential issues.""" + + def test_rapid_loop_creation(self): + """Test rapid creation and use of loops in many threads.""" + + def quick_work(n): + async def work(): + return n + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=50) as executor: + futures = [executor.submit(quick_work, i) for i in range(200)] + results = [f.result(timeout=30) for f in futures] + + assert len(results) == 200 + assert set(results) == set(range(200)) + + def test_long_running_async_tasks(self): + """Test with longer-running async tasks.""" + + def long_worker(n): + async def work(): + await asyncio.sleep(0.1) + return n + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(long_worker, i) for i in range(5)] + results = [f.result(timeout=10) for f in futures] + + assert len(results) == 5 + + def test_exception_in_gather(self): + """Test exception handling within gathered tasks in ThreadPool.""" + + def worker(): + async def work(): + async def success(): + return "ok" + + async def failure(): + raise RuntimeError("task failed") + + return await thread_safe_gather( + success(), failure(), return_exceptions=True + ) + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit(worker) + result = future.result(timeout=5) + + assert result[0] == "ok" + assert isinstance(result[1], RuntimeError) + + def test_sequential_then_parallel(self): + """Test sequential calls followed by parallel calls.""" + + # Sequential calls first + async def seq_task(): + return "seq" + + for _ in range(5): + result = run_sync(seq_task()) + assert result == "seq" + + # Then parallel calls + def parallel_worker(n): + async def work(): + await asyncio.sleep(0.01) + return f"parallel_{n}" + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=5) as executor: + futures = [executor.submit(parallel_worker, i) for i in range(5)] + results = [f.result(timeout=10) for f in futures] + + assert len(results) == 5 + + def 
test_main_thread_interleaved_with_workers(self): + """Test main thread calls interleaved with worker thread calls.""" + results = [] + + # Main thread call + async def main_task(): + return "main" + + results.append(("main_1", run_sync(main_task()))) + + # Worker calls + def worker(n): + async def work(): + return f"worker_{n}" + + return run_sync(work()) + + with ThreadPoolExecutor(max_workers=2) as executor: + futures = [executor.submit(worker, i) for i in range(2)] + for i, f in enumerate(futures): + results.append((f"worker_{i}", f.result(timeout=5))) + + # Main thread call again + results.append(("main_2", run_sync(main_task()))) + + assert len(results) == 4 + assert results[0] == ("main_1", "main") + assert results[-1] == ("main_2", "main") + + +class TestMemoryAndPerformance: + """Tests for memory usage and performance characteristics.""" + + def test_no_loop_accumulation(self): + """ + Test that we don't accumulate event loops. + ThreadPoolExecutor reuses threads, so loops should be reused too. + """ + + def get_loop_id(): + loop = get_current_loop() + return id(loop) + + with ThreadPoolExecutor(max_workers=2) as executor: + # Submit many tasks (more than workers) + futures = [executor.submit(get_loop_id) for _ in range(20)] + loop_ids = [f.result() for f in futures] + + # Should only have 2 unique loops (one per worker) + unique_loops = len(set(loop_ids)) + assert unique_loops <= 2, f"Expected max 2 loops, got {unique_loops}" + + def test_performance_not_degraded(self): + """Test that thread-safe operations don't significantly degrade performance.""" + + async def simple_task(): + return 1 + + # Measure direct call time + start = time.time() + for _ in range(100): + run_sync(simple_task()) + direct_time = time.time() - start + + # Measure ThreadPoolExecutor time + def worker(): + return run_sync(simple_task()) + + start = time.time() + with ThreadPoolExecutor(max_workers=4) as executor: + futures = [executor.submit(worker) for _ in range(100)] + [f.result() for f in futures] + executor_time = time.time() - start + + # Executor should not be dramatically slower (allow 10x overhead for thread management) + assert executor_time < direct_time * 10, ( + f"Performance degradation too high: {executor_time:.3f}s vs {direct_time:.3f}s" + ) diff --git a/x10/perpetual/trading_client/account_module.py b/x10/perpetual/trading_client/account_module.py index 93d8bfa..03b406c 100644 --- a/x10/perpetual/trading_client/account_module.py +++ b/x10/perpetual/trading_client/account_module.py @@ -127,7 +127,7 @@ async def get_order_by_external_id(self, external_id: str) -> WrappedApiResponse async def get_trades( self, - market_names: List[str], + market_names: Optional[List[str]] = None, trade_side: Optional[OrderSide] = None, trade_type: Optional[TradeType] = None, cursor: Optional[int] = None, @@ -139,7 +139,7 @@ async def get_trades( url = self._get_url( "/user/trades", - query={"market": market_names, "side": trade_side, "type": trade_type, "cursor": cursor, "limit": limit}, + query={"market": market_names or [], "side": trade_side, "type": trade_type, "cursor": cursor, "limit": limit}, ) return await send_get_request(
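Taken together, the loop-isolation and loop-reuse tests imply a per-thread loop registry underneath get_current_loop()/run_sync(). A minimal sketch of that pattern; the shipped extended.utils.async_helpers may differ, e.g. by also applying nest_asyncio to support the nested run_sync cases:

import asyncio
import threading

_local = threading.local()

def get_current_loop_sketch() -> asyncio.AbstractEventLoop:
    # Reuse one loop per thread so ThreadPoolExecutor workers do not
    # accumulate loops (cf. test_no_loop_accumulation above).
    loop = getattr(_local, "loop", None)
    if loop is None or loop.is_closed():
        loop = asyncio.new_event_loop()
        _local.loop = loop
    return loop

def run_sync_sketch(coro):
    # Drive the coroutine to completion on this thread's private loop.
    return get_current_loop_sketch().run_until_complete(coro)

On the final x10 hunk: after the change, get_trades() can be called without a market_names argument; the request then carries an empty market filter, which presumably means no market restriction on the server side.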