From 4f0e718836fff0da8bdd15b3fbea27affc187bbf Mon Sep 17 00:00:00 2001 From: "Joseph T. French" Date: Sun, 8 Feb 2026 12:30:20 -0600 Subject: [PATCH] Enhance graph limits and materialization features - Added content limits to the graph limits API documentation for better clarity on node, relationship, and row limits. - Updated the create graph request model to improve clarity on instance tier options. - Introduced a dry run option in the materialization request to validate limits without executing the operation. - Enhanced the materialization response to include limit check results for dry run requests. - Removed deprecated storage info models to streamline the codebase. These changes improve the API's usability and provide clearer documentation for users. --- .../api/graph_limits/get_graph_limits.py | 4 + .../api/graphs/get_available_graph_tiers.py | 16 +- .../api/materialize/materialize_graph.py | 21 +++ robosystems_client/models/__init__.py | 12 +- .../models/backup_list_response.py | 47 +++++- robosystems_client/models/content_limits.py | 149 ++++++++++++++++++ .../models/create_graph_request.py | 4 +- robosystems_client/models/credit_limits.py | 16 -- robosystems_client/models/download_quota.py | 88 +++++++++++ .../models/graph_limits_response.py | 33 ++++ .../models/graph_subscription_tier.py | 25 ++- .../models/graph_subscriptions.py | 2 +- .../models/materialize_request.py | 10 ++ .../models/materialize_response.py | 49 +++++- ...aterialize_response_limit_check_type_0.py} | 18 +-- robosystems_client/models/storage_info.py | 38 ++--- robosystems_client/models/validation_error.py | 35 +++- ...er_tier.py => validation_error_context.py} | 18 +-- 18 files changed, 486 insertions(+), 99 deletions(-) create mode 100644 robosystems_client/models/content_limits.py create mode 100644 robosystems_client/models/download_quota.py rename robosystems_client/models/{storage_info_overage_pricing.py => materialize_response_limit_check_type_0.py} (64%) rename robosystems_client/models/{storage_info_included_per_tier.py => validation_error_context.py} (66%) diff --git a/robosystems_client/api/graph_limits/get_graph_limits.py b/robosystems_client/api/graph_limits/get_graph_limits.py index a64b8b1..d9fc5b2 100644 --- a/robosystems_client/api/graph_limits/get_graph_limits.py +++ b/robosystems_client/api/graph_limits/get_graph_limits.py @@ -82,6 +82,7 @@ def sync_detailed( - **Backup Limits**: Frequency, retention, size limits - **Rate Limits**: Requests per minute/hour based on tier - **Credit Limits**: AI operation credits (if applicable) + - **Content Limits**: Node, relationship, and row limits (if applicable) This unified endpoint provides all limits in one place for easier client integration. @@ -125,6 +126,7 @@ def sync( - **Backup Limits**: Frequency, retention, size limits - **Rate Limits**: Requests per minute/hour based on tier - **Credit Limits**: AI operation credits (if applicable) + - **Content Limits**: Node, relationship, and row limits (if applicable) This unified endpoint provides all limits in one place for easier client integration. @@ -163,6 +165,7 @@ async def asyncio_detailed( - **Backup Limits**: Frequency, retention, size limits - **Rate Limits**: Requests per minute/hour based on tier - **Credit Limits**: AI operation credits (if applicable) + - **Content Limits**: Node, relationship, and row limits (if applicable) This unified endpoint provides all limits in one place for easier client integration. 
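A minimal usage sketch for the limits and dry-run features introduced in this patch (illustrative only, not part of the generated code). The module paths and model names come from the diff itself; client construction, the `graph_id` parameter name, the `body=` keyword, and all example values are assumptions based on typical openapi-python-client output.

    # Hypothetical usage sketch -- not part of the generated code in this patch.
    from robosystems_client import AuthenticatedClient  # assumed package entry point
    from robosystems_client.api.graph_limits import get_graph_limits
    from robosystems_client.api.materialize import materialize_graph
    from robosystems_client.models import (
        ContentLimits,
        GraphLimitsResponse,
        MaterializeRequest,
        MaterializeResponse,
    )

    client = AuthenticatedClient(
        base_url="https://api.example.com",  # placeholder URL
        token="YOUR_API_KEY",                # placeholder credential
    )

    # 1. Read the unified limits, including the new ContentLimits block.
    limits = get_graph_limits.sync(graph_id="kg1a2b3c", client=client)
    if isinstance(limits, GraphLimitsResponse) and isinstance(limits.content, ContentLimits):
      content = limits.content
      print(f"nodes: {content.current_nodes}/{content.max_nodes}")
      print(f"relationships: {content.current_relationships}/{content.max_relationships}")

    # 2. Dry-run a materialization to validate limits without executing it.
    resp = materialize_graph.sync(
        graph_id="kg1a2b3c",
        client=client,
        body=MaterializeRequest(dry_run=True),
    )
    if isinstance(resp, MaterializeResponse) and resp.limit_check:
      # For dry runs, limit_check is a free-form mapping of usage, limits, and warnings.
      for key in resp.limit_check.additional_keys:
        print(key, resp.limit_check[key])
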
@@ -204,6 +207,7 @@ async def asyncio( - **Backup Limits**: Frequency, retention, size limits - **Rate Limits**: Requests per minute/hour based on tier - **Credit Limits**: AI operation credits (if applicable) + - **Content Limits**: Node, relationship, and row limits (if applicable) This unified endpoint provides all limits in one place for easier client integration. diff --git a/robosystems_client/api/graphs/get_available_graph_tiers.py b/robosystems_client/api/graphs/get_available_graph_tiers.py index a2c9201..766ecf4 100644 --- a/robosystems_client/api/graphs/get_available_graph_tiers.py +++ b/robosystems_client/api/graphs/get_available_graph_tiers.py @@ -83,11 +83,9 @@ def sync_detailed( - Availability status **Available Tiers:** - - **ladybug-standard**: Multi-tenant entry-level tier + - **ladybug-standard**: Dedicated entry-level tier - **ladybug-large**: Dedicated professional tier with subgraph support - **ladybug-xlarge**: Enterprise tier with maximum resources - - **neo4j-community-large**: Neo4j Community Edition (optional, if enabled) - - **neo4j-enterprise-xlarge**: Neo4j Enterprise Edition (optional, if enabled) **Use Cases:** - Display tier options in graph creation UI @@ -140,11 +138,9 @@ def sync( - Availability status **Available Tiers:** - - **ladybug-standard**: Multi-tenant entry-level tier + - **ladybug-standard**: Dedicated entry-level tier - **ladybug-large**: Dedicated professional tier with subgraph support - **ladybug-xlarge**: Enterprise tier with maximum resources - - **neo4j-community-large**: Neo4j Community Edition (optional, if enabled) - - **neo4j-enterprise-xlarge**: Neo4j Enterprise Edition (optional, if enabled) **Use Cases:** - Display tier options in graph creation UI @@ -192,11 +188,9 @@ async def asyncio_detailed( - Availability status **Available Tiers:** - - **ladybug-standard**: Multi-tenant entry-level tier + - **ladybug-standard**: Dedicated entry-level tier - **ladybug-large**: Dedicated professional tier with subgraph support - **ladybug-xlarge**: Enterprise tier with maximum resources - - **neo4j-community-large**: Neo4j Community Edition (optional, if enabled) - - **neo4j-enterprise-xlarge**: Neo4j Enterprise Edition (optional, if enabled) **Use Cases:** - Display tier options in graph creation UI @@ -247,11 +241,9 @@ async def asyncio( - Availability status **Available Tiers:** - - **ladybug-standard**: Multi-tenant entry-level tier + - **ladybug-standard**: Dedicated entry-level tier - **ladybug-large**: Dedicated professional tier with subgraph support - **ladybug-xlarge**: Enterprise tier with maximum resources - - **neo4j-community-large**: Neo4j Community Edition (optional, if enabled) - - **neo4j-enterprise-xlarge**: Neo4j Enterprise Edition (optional, if enabled) **Use Cases:** - Display tier options in graph creation UI diff --git a/robosystems_client/api/materialize/materialize_graph.py b/robosystems_client/api/materialize/materialize_graph.py index fbca753..2b4bf7a 100644 --- a/robosystems_client/api/materialize/materialize_graph.py +++ b/robosystems_client/api/materialize/materialize_graph.py @@ -67,6 +67,11 @@ def _parse_response( return response_409 + if response.status_code == 413: + response_413 = ErrorResponse.from_dict(response.json()) + + return response_413 + if response.status_code == 422: response_422 = HTTPValidationError.from_dict(response.json()) @@ -150,6 +155,10 @@ def sync_detailed( Full graph materialization can take minutes for large datasets. Consider running during off-peak hours for production systems. 
+ **Dry Run:** + Set `dry_run=true` to validate limits without executing. Returns current usage, tier limits, + and any warnings or errors. No lock is acquired, no SSE operation is created. + **Credits:** Materialization is included - no credit consumption @@ -234,6 +243,10 @@ def sync( Full graph materialization can take minutes for large datasets. Consider running during off-peak hours for production systems. + **Dry Run:** + Set `dry_run=true` to validate limits without executing. Returns current usage, tier limits, + and any warnings or errors. No lock is acquired, no SSE operation is created. + **Credits:** Materialization is included - no credit consumption @@ -313,6 +326,10 @@ async def asyncio_detailed( Full graph materialization can take minutes for large datasets. Consider running during off-peak hours for production systems. + **Dry Run:** + Set `dry_run=true` to validate limits without executing. Returns current usage, tier limits, + and any warnings or errors. No lock is acquired, no SSE operation is created. + **Credits:** Materialization is included - no credit consumption @@ -395,6 +412,10 @@ async def asyncio( Full graph materialization can take minutes for large datasets. Consider running during off-peak hours for production systems. + **Dry Run:** + Set `dry_run=true` to validate limits without executing. Returns current usage, tier limits, + and any warnings or errors. No lock is acquired, no SSE operation is created. + **Credits:** Materialization is included - no credit consumption diff --git a/robosystems_client/models/__init__.py b/robosystems_client/models/__init__.py index fad8094..5eeb97f 100644 --- a/robosystems_client/models/__init__.py +++ b/robosystems_client/models/__init__.py @@ -55,6 +55,7 @@ from .connection_response import ConnectionResponse from .connection_response_metadata import ConnectionResponseMetadata from .connection_response_provider import ConnectionResponseProvider +from .content_limits import ContentLimits from .copy_operation_limits import CopyOperationLimits from .create_api_key_request import CreateAPIKeyRequest from .create_api_key_response import CreateAPIKeyResponse @@ -90,6 +91,7 @@ DetailedTransactionsResponseDateRange, ) from .detailed_transactions_response_summary import DetailedTransactionsResponseSummary +from .download_quota import DownloadQuota from .email_verification_request import EmailVerificationRequest from .enhanced_credit_transaction_response import EnhancedCreditTransactionResponse from .enhanced_credit_transaction_response_metadata import ( @@ -162,6 +164,7 @@ from .logout_user_response_logoutuser import LogoutUserResponseLogoutuser from .materialize_request import MaterializeRequest from .materialize_response import MaterializeResponse +from .materialize_response_limit_check_type_0 import MaterializeResponseLimitCheckType0 from .materialize_status_response import MaterializeStatusResponse from .mcp_tool_call import MCPToolCall from .mcp_tool_call_arguments import MCPToolCallArguments @@ -252,8 +255,6 @@ from .sso_exchange_response import SSOExchangeResponse from .sso_token_response import SSOTokenResponse from .storage_info import StorageInfo -from .storage_info_included_per_tier import StorageInfoIncludedPerTier -from .storage_info_overage_pricing import StorageInfoOveragePricing from .storage_limit_response import StorageLimitResponse from .storage_limits import StorageLimits from .storage_summary import StorageSummary @@ -289,6 +290,7 @@ from .user_graphs_response import UserGraphsResponse from .user_response 
import UserResponse from .validation_error import ValidationError +from .validation_error_context import ValidationErrorContext from .view_axis_config import ViewAxisConfig from .view_axis_config_element_labels_type_0 import ViewAxisConfigElementLabelsType0 from .view_axis_config_member_labels_type_0 import ViewAxisConfigMemberLabelsType0 @@ -344,6 +346,7 @@ "ConnectionResponse", "ConnectionResponseMetadata", "ConnectionResponseProvider", + "ContentLimits", "CopyOperationLimits", "CreateAPIKeyRequest", "CreateAPIKeyResponse", @@ -375,6 +378,7 @@ "DetailedTransactionsResponse", "DetailedTransactionsResponseDateRange", "DetailedTransactionsResponseSummary", + "DownloadQuota", "EmailVerificationRequest", "EnhancedCreditTransactionResponse", "EnhancedCreditTransactionResponseMetadata", @@ -433,6 +437,7 @@ "LogoutUserResponseLogoutuser", "MaterializeRequest", "MaterializeResponse", + "MaterializeResponseLimitCheckType0", "MaterializeStatusResponse", "MCPToolCall", "MCPToolCallArguments", @@ -507,8 +512,6 @@ "SSOExchangeResponse", "SSOTokenResponse", "StorageInfo", - "StorageInfoIncludedPerTier", - "StorageInfoOveragePricing", "StorageLimitResponse", "StorageLimits", "StorageSummary", @@ -540,6 +543,7 @@ "UserGraphsResponse", "UserResponse", "ValidationError", + "ValidationErrorContext", "ViewAxisConfig", "ViewAxisConfigElementLabelsType0", "ViewAxisConfigMemberLabelsType0", diff --git a/robosystems_client/models/backup_list_response.py b/robosystems_client/models/backup_list_response.py index 45a24bb..cd3acec 100644 --- a/robosystems_client/models/backup_list_response.py +++ b/robosystems_client/models/backup_list_response.py @@ -1,13 +1,16 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field +from ..types import UNSET, Unset + if TYPE_CHECKING: from ..models.backup_response import BackupResponse + from ..models.download_quota import DownloadQuota T = TypeVar("T", bound="BackupListResponse") @@ -21,14 +24,20 @@ class BackupListResponse: backups (list[BackupResponse]): total_count (int): graph_id (str): + is_shared_repository (bool | Unset): Whether this is a shared repository (limits apply) Default: False. 
+ download_quota (DownloadQuota | None | Unset): Download quota for shared repositories """ backups: list[BackupResponse] total_count: int graph_id: str + is_shared_repository: bool | Unset = False + download_quota: DownloadQuota | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: + from ..models.download_quota import DownloadQuota + backups = [] for backups_item_data in self.backups: backups_item = backups_item_data.to_dict() @@ -38,6 +47,16 @@ def to_dict(self) -> dict[str, Any]: graph_id = self.graph_id + is_shared_repository = self.is_shared_repository + + download_quota: dict[str, Any] | None | Unset + if isinstance(self.download_quota, Unset): + download_quota = UNSET + elif isinstance(self.download_quota, DownloadQuota): + download_quota = self.download_quota.to_dict() + else: + download_quota = self.download_quota + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -47,12 +66,17 @@ def to_dict(self) -> dict[str, Any]: "graph_id": graph_id, } ) + if is_shared_repository is not UNSET: + field_dict["is_shared_repository"] = is_shared_repository + if download_quota is not UNSET: + field_dict["download_quota"] = download_quota return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.backup_response import BackupResponse + from ..models.download_quota import DownloadQuota d = dict(src_dict) backups = [] @@ -66,10 +90,31 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: graph_id = d.pop("graph_id") + is_shared_repository = d.pop("is_shared_repository", UNSET) + + def _parse_download_quota(data: object) -> DownloadQuota | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + download_quota_type_0 = DownloadQuota.from_dict(data) + + return download_quota_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(DownloadQuota | None | Unset, data) + + download_quota = _parse_download_quota(d.pop("download_quota", UNSET)) + backup_list_response = cls( backups=backups, total_count=total_count, graph_id=graph_id, + is_shared_repository=is_shared_repository, + download_quota=download_quota, ) backup_list_response.additional_properties = d diff --git a/robosystems_client/models/content_limits.py b/robosystems_client/models/content_limits.py new file mode 100644 index 0000000..924f02e --- /dev/null +++ b/robosystems_client/models/content_limits.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +from collections.abc import Mapping +from typing import Any, TypeVar, cast + +from attrs import define as _attrs_define +from attrs import field as _attrs_field + +from ..types import UNSET, Unset + +T = TypeVar("T", bound="ContentLimits") + + +@_attrs_define +class ContentLimits: + """Graph content limits (nodes, relationships, rows). 
+ + Attributes: + max_nodes (int): Maximum nodes allowed + max_relationships (int): Maximum relationships allowed + max_rows_per_copy (int): Maximum rows per copy/materialization operation + max_single_table_rows (int): Maximum rows per staging table + chunk_size_rows (int): Rows per materialization chunk + current_nodes (int | None | Unset): Current node count + current_relationships (int | None | Unset): Current relationship count + approaching_limits (list[str] | Unset): List of limits being approached (>80%) + """ + + max_nodes: int + max_relationships: int + max_rows_per_copy: int + max_single_table_rows: int + chunk_size_rows: int + current_nodes: int | None | Unset = UNSET + current_relationships: int | None | Unset = UNSET + approaching_limits: list[str] | Unset = UNSET + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + max_nodes = self.max_nodes + + max_relationships = self.max_relationships + + max_rows_per_copy = self.max_rows_per_copy + + max_single_table_rows = self.max_single_table_rows + + chunk_size_rows = self.chunk_size_rows + + current_nodes: int | None | Unset + if isinstance(self.current_nodes, Unset): + current_nodes = UNSET + else: + current_nodes = self.current_nodes + + current_relationships: int | None | Unset + if isinstance(self.current_relationships, Unset): + current_relationships = UNSET + else: + current_relationships = self.current_relationships + + approaching_limits: list[str] | Unset = UNSET + if not isinstance(self.approaching_limits, Unset): + approaching_limits = self.approaching_limits + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "max_nodes": max_nodes, + "max_relationships": max_relationships, + "max_rows_per_copy": max_rows_per_copy, + "max_single_table_rows": max_single_table_rows, + "chunk_size_rows": chunk_size_rows, + } + ) + if current_nodes is not UNSET: + field_dict["current_nodes"] = current_nodes + if current_relationships is not UNSET: + field_dict["current_relationships"] = current_relationships + if approaching_limits is not UNSET: + field_dict["approaching_limits"] = approaching_limits + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + max_nodes = d.pop("max_nodes") + + max_relationships = d.pop("max_relationships") + + max_rows_per_copy = d.pop("max_rows_per_copy") + + max_single_table_rows = d.pop("max_single_table_rows") + + chunk_size_rows = d.pop("chunk_size_rows") + + def _parse_current_nodes(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + current_nodes = _parse_current_nodes(d.pop("current_nodes", UNSET)) + + def _parse_current_relationships(data: object) -> int | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + return cast(int | None | Unset, data) + + current_relationships = _parse_current_relationships( + d.pop("current_relationships", UNSET) + ) + + approaching_limits = cast(list[str], d.pop("approaching_limits", UNSET)) + + content_limits = cls( + max_nodes=max_nodes, + max_relationships=max_relationships, + max_rows_per_copy=max_rows_per_copy, + max_single_table_rows=max_single_table_rows, + chunk_size_rows=chunk_size_rows, + current_nodes=current_nodes, + current_relationships=current_relationships, + approaching_limits=approaching_limits, + ) + + 
content_limits.additional_properties = d + return content_limits + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/create_graph_request.py b/robosystems_client/models/create_graph_request.py index 278fdd1..30dc935 100644 --- a/robosystems_client/models/create_graph_request.py +++ b/robosystems_client/models/create_graph_request.py @@ -27,8 +27,8 @@ class CreateGraphRequest: Attributes: metadata (GraphMetadata): Metadata for graph creation. - instance_tier (str | Unset): Instance tier: ladybug-standard, ladybug-large, ladybug-xlarge, neo4j-community- - large, neo4j-enterprise-xlarge Default: 'ladybug-standard'. + instance_tier (str | Unset): Instance tier: ladybug-standard, ladybug-large, ladybug-xlarge Default: 'ladybug- + standard'. custom_schema (CustomSchemaDefinition | None | Unset): Custom schema definition to apply. If provided, creates a generic custom graph. If omitted, creates an entity graph using schema_extensions. initial_entity (InitialEntityData | None | Unset): Optional initial entity to create in the graph. If provided diff --git a/robosystems_client/models/credit_limits.py b/robosystems_client/models/credit_limits.py index 8521a37..5e5857a 100644 --- a/robosystems_client/models/credit_limits.py +++ b/robosystems_client/models/credit_limits.py @@ -16,14 +16,10 @@ class CreditLimits: Attributes: monthly_ai_credits (int): Monthly AI credits allocation current_balance (int): Current credit balance - storage_billing_enabled (bool): Whether storage billing is enabled - storage_rate_per_gb_per_day (int): Storage billing rate per GB per day """ monthly_ai_credits: int current_balance: int - storage_billing_enabled: bool - storage_rate_per_gb_per_day: int additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -31,18 +27,12 @@ def to_dict(self) -> dict[str, Any]: current_balance = self.current_balance - storage_billing_enabled = self.storage_billing_enabled - - storage_rate_per_gb_per_day = self.storage_rate_per_gb_per_day - field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( { "monthly_ai_credits": monthly_ai_credits, "current_balance": current_balance, - "storage_billing_enabled": storage_billing_enabled, - "storage_rate_per_gb_per_day": storage_rate_per_gb_per_day, } ) @@ -55,15 +45,9 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: current_balance = d.pop("current_balance") - storage_billing_enabled = d.pop("storage_billing_enabled") - - storage_rate_per_gb_per_day = d.pop("storage_rate_per_gb_per_day") - credit_limits = cls( monthly_ai_credits=monthly_ai_credits, current_balance=current_balance, - storage_billing_enabled=storage_billing_enabled, - storage_rate_per_gb_per_day=storage_rate_per_gb_per_day, ) credit_limits.additional_properties = d diff --git a/robosystems_client/models/download_quota.py b/robosystems_client/models/download_quota.py new file mode 100644 index 0000000..2f46f7b --- /dev/null +++ b/robosystems_client/models/download_quota.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +import 
datetime +from collections.abc import Mapping +from typing import Any, TypeVar + +from attrs import define as _attrs_define +from attrs import field as _attrs_field +from dateutil.parser import isoparse + +T = TypeVar("T", bound="DownloadQuota") + + +@_attrs_define +class DownloadQuota: + """Download quota information for shared repository backups. + + Attributes: + limit_per_day (int): Maximum downloads allowed per day + used_today (int): Number of downloads used today + remaining (int): Downloads remaining today + resets_at (datetime.datetime): When the daily limit resets (UTC) + """ + + limit_per_day: int + used_today: int + remaining: int + resets_at: datetime.datetime + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) + + def to_dict(self) -> dict[str, Any]: + limit_per_day = self.limit_per_day + + used_today = self.used_today + + remaining = self.remaining + + resets_at = self.resets_at.isoformat() + + field_dict: dict[str, Any] = {} + field_dict.update(self.additional_properties) + field_dict.update( + { + "limit_per_day": limit_per_day, + "used_today": used_today, + "remaining": remaining, + "resets_at": resets_at, + } + ) + + return field_dict + + @classmethod + def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + d = dict(src_dict) + limit_per_day = d.pop("limit_per_day") + + used_today = d.pop("used_today") + + remaining = d.pop("remaining") + + resets_at = isoparse(d.pop("resets_at")) + + download_quota = cls( + limit_per_day=limit_per_day, + used_today=used_today, + remaining=remaining, + resets_at=resets_at, + ) + + download_quota.additional_properties = d + return download_quota + + @property + def additional_keys(self) -> list[str]: + return list(self.additional_properties.keys()) + + def __getitem__(self, key: str) -> Any: + return self.additional_properties[key] + + def __setitem__(self, key: str, value: Any) -> None: + self.additional_properties[key] = value + + def __delitem__(self, key: str) -> None: + del self.additional_properties[key] + + def __contains__(self, key: str) -> bool: + return key in self.additional_properties diff --git a/robosystems_client/models/graph_limits_response.py b/robosystems_client/models/graph_limits_response.py index e80dc17..4ecb3ed 100644 --- a/robosystems_client/models/graph_limits_response.py +++ b/robosystems_client/models/graph_limits_response.py @@ -10,6 +10,7 @@ if TYPE_CHECKING: from ..models.backup_limits import BackupLimits + from ..models.content_limits import ContentLimits from ..models.copy_operation_limits import CopyOperationLimits from ..models.credit_limits import CreditLimits from ..models.query_limits import QueryLimits @@ -35,6 +36,7 @@ class GraphLimitsResponse: backups (BackupLimits): Backup operation limits. rate_limits (RateLimits): API rate limits. 
credits_ (CreditLimits | None | Unset): AI credit limits (if applicable) + content (ContentLimits | None | Unset): Graph content limits (if applicable) """ graph_id: str @@ -47,9 +49,11 @@ class GraphLimitsResponse: backups: BackupLimits rate_limits: RateLimits credits_: CreditLimits | None | Unset = UNSET + content: ContentLimits | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: + from ..models.content_limits import ContentLimits from ..models.credit_limits import CreditLimits graph_id = self.graph_id @@ -78,6 +82,14 @@ def to_dict(self) -> dict[str, Any]: else: credits_ = self.credits_ + content: dict[str, Any] | None | Unset + if isinstance(self.content, Unset): + content = UNSET + elif isinstance(self.content, ContentLimits): + content = self.content.to_dict() + else: + content = self.content + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -95,12 +107,15 @@ def to_dict(self) -> dict[str, Any]: ) if credits_ is not UNSET: field_dict["credits"] = credits_ + if content is not UNSET: + field_dict["content"] = content return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: from ..models.backup_limits import BackupLimits + from ..models.content_limits import ContentLimits from ..models.copy_operation_limits import CopyOperationLimits from ..models.credit_limits import CreditLimits from ..models.query_limits import QueryLimits @@ -143,6 +158,23 @@ def _parse_credits_(data: object) -> CreditLimits | None | Unset: credits_ = _parse_credits_(d.pop("credits", UNSET)) + def _parse_content(data: object) -> ContentLimits | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + content_type_0 = ContentLimits.from_dict(data) + + return content_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(ContentLimits | None | Unset, data) + + content = _parse_content(d.pop("content", UNSET)) + graph_limits_response = cls( graph_id=graph_id, subscription_tier=subscription_tier, @@ -154,6 +186,7 @@ def _parse_credits_(data: object) -> CreditLimits | None | Unset: backups=backups, rate_limits=rate_limits, credits_=credits_, + content=content, ) graph_limits_response.additional_properties = d diff --git a/robosystems_client/models/graph_subscription_tier.py b/robosystems_client/models/graph_subscription_tier.py index 6da2477..fcdd82c 100644 --- a/robosystems_client/models/graph_subscription_tier.py +++ b/robosystems_client/models/graph_subscription_tier.py @@ -24,14 +24,13 @@ class GraphSubscriptionTier: description (str): Tier description monthly_price_per_graph (float): Monthly price in USD per graph monthly_credits_per_graph (int): Monthly AI credits per graph - storage_included_gb (int): Storage included in GB - storage_overage_per_gb (float): Overage cost per GB per month infrastructure (str): Infrastructure description features (list[str]): List of features backup_retention_days (int): Backup retention in days priority_support (bool): Whether priority support is included api_rate_multiplier (float): API rate multiplier backend (str): Database backend (ladybug or neo4j) + storage_included (bool | Unset): Whether storage is included in the tier Default: True. max_queries_per_hour (int | None | Unset): Maximum queries per hour max_subgraphs (int | Unset): Maximum subgraphs supported Default: 0. 
instance_type (None | str | Unset): Instance type @@ -42,14 +41,13 @@ class GraphSubscriptionTier: description: str monthly_price_per_graph: float monthly_credits_per_graph: int - storage_included_gb: int - storage_overage_per_gb: float infrastructure: str features: list[str] backup_retention_days: int priority_support: bool api_rate_multiplier: float backend: str + storage_included: bool | Unset = True max_queries_per_hour: int | None | Unset = UNSET max_subgraphs: int | Unset = 0 instance_type: None | str | Unset = UNSET @@ -66,10 +64,6 @@ def to_dict(self) -> dict[str, Any]: monthly_credits_per_graph = self.monthly_credits_per_graph - storage_included_gb = self.storage_included_gb - - storage_overage_per_gb = self.storage_overage_per_gb - infrastructure = self.infrastructure features = self.features @@ -82,6 +76,8 @@ def to_dict(self) -> dict[str, Any]: backend = self.backend + storage_included = self.storage_included + max_queries_per_hour: int | None | Unset if isinstance(self.max_queries_per_hour, Unset): max_queries_per_hour = UNSET @@ -105,8 +101,6 @@ def to_dict(self) -> dict[str, Any]: "description": description, "monthly_price_per_graph": monthly_price_per_graph, "monthly_credits_per_graph": monthly_credits_per_graph, - "storage_included_gb": storage_included_gb, - "storage_overage_per_gb": storage_overage_per_gb, "infrastructure": infrastructure, "features": features, "backup_retention_days": backup_retention_days, @@ -115,6 +109,8 @@ def to_dict(self) -> dict[str, Any]: "backend": backend, } ) + if storage_included is not UNSET: + field_dict["storage_included"] = storage_included if max_queries_per_hour is not UNSET: field_dict["max_queries_per_hour"] = max_queries_per_hour if max_subgraphs is not UNSET: @@ -137,10 +133,6 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: monthly_credits_per_graph = d.pop("monthly_credits_per_graph") - storage_included_gb = d.pop("storage_included_gb") - - storage_overage_per_gb = d.pop("storage_overage_per_gb") - infrastructure = d.pop("infrastructure") features = cast(list[str], d.pop("features")) @@ -153,6 +145,8 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: backend = d.pop("backend") + storage_included = d.pop("storage_included", UNSET) + def _parse_max_queries_per_hour(data: object) -> int | None | Unset: if data is None: return data @@ -181,14 +175,13 @@ def _parse_instance_type(data: object) -> None | str | Unset: description=description, monthly_price_per_graph=monthly_price_per_graph, monthly_credits_per_graph=monthly_credits_per_graph, - storage_included_gb=storage_included_gb, - storage_overage_per_gb=storage_overage_per_gb, infrastructure=infrastructure, features=features, backup_retention_days=backup_retention_days, priority_support=priority_support, api_rate_multiplier=api_rate_multiplier, backend=backend, + storage_included=storage_included, max_queries_per_hour=max_queries_per_hour, max_subgraphs=max_subgraphs, instance_type=instance_type, diff --git a/robosystems_client/models/graph_subscriptions.py b/robosystems_client/models/graph_subscriptions.py index 8b8c79d..b405a22 100644 --- a/robosystems_client/models/graph_subscriptions.py +++ b/robosystems_client/models/graph_subscriptions.py @@ -26,7 +26,7 @@ class GraphSubscriptions: description (str): Description of graph subscriptions pricing_model (str): Pricing model type (per_graph or per_organization) tiers (list[GraphSubscriptionTier]): Available infrastructure tiers - storage (StorageInfo): Storage pricing information. 
+ storage (StorageInfo): Storage information. notes (list[str]): Important notes """ diff --git a/robosystems_client/models/materialize_request.py b/robosystems_client/models/materialize_request.py index 014067d..1566ff7 100644 --- a/robosystems_client/models/materialize_request.py +++ b/robosystems_client/models/materialize_request.py @@ -17,11 +17,14 @@ class MaterializeRequest: force (bool | Unset): Force materialization even if graph is not stale Default: False. rebuild (bool | Unset): Delete and recreate graph database before materialization Default: False. ignore_errors (bool | Unset): Continue ingestion on row errors Default: True. + dry_run (bool | Unset): Validate limits without executing materialization. Returns usage, limits, and warnings. + Default: False. """ force: bool | Unset = False rebuild: bool | Unset = False ignore_errors: bool | Unset = True + dry_run: bool | Unset = False def to_dict(self) -> dict[str, Any]: force = self.force @@ -30,6 +33,8 @@ def to_dict(self) -> dict[str, Any]: ignore_errors = self.ignore_errors + dry_run = self.dry_run + field_dict: dict[str, Any] = {} field_dict.update({}) @@ -39,6 +44,8 @@ def to_dict(self) -> dict[str, Any]: field_dict["rebuild"] = rebuild if ignore_errors is not UNSET: field_dict["ignore_errors"] = ignore_errors + if dry_run is not UNSET: + field_dict["dry_run"] = dry_run return field_dict @@ -51,10 +58,13 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: ignore_errors = d.pop("ignore_errors", UNSET) + dry_run = d.pop("dry_run", UNSET) + materialize_request = cls( force=force, rebuild=rebuild, ignore_errors=ignore_errors, + dry_run=dry_run, ) return materialize_request diff --git a/robosystems_client/models/materialize_response.py b/robosystems_client/models/materialize_response.py index 9ed6d62..f84b958 100644 --- a/robosystems_client/models/materialize_response.py +++ b/robosystems_client/models/materialize_response.py @@ -1,13 +1,19 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, TypeVar +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import define as _attrs_define from attrs import field as _attrs_field from ..types import UNSET, Unset +if TYPE_CHECKING: + from ..models.materialize_response_limit_check_type_0 import ( + MaterializeResponseLimitCheckType0, + ) + + T = TypeVar("T", bound="MaterializeResponse") @@ -24,15 +30,22 @@ class MaterializeResponse: operation_id (str): SSE operation ID for progress tracking message (str): Human-readable status message status (str | Unset): Operation status Default: 'queued'. 
+ limit_check (MaterializeResponseLimitCheckType0 | None | Unset): Limit check results (only present for dry_run + requests) """ graph_id: str operation_id: str message: str status: str | Unset = "queued" + limit_check: MaterializeResponseLimitCheckType0 | None | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: + from ..models.materialize_response_limit_check_type_0 import ( + MaterializeResponseLimitCheckType0, + ) + graph_id = self.graph_id operation_id = self.operation_id @@ -41,6 +54,14 @@ def to_dict(self) -> dict[str, Any]: status = self.status + limit_check: dict[str, Any] | None | Unset + if isinstance(self.limit_check, Unset): + limit_check = UNSET + elif isinstance(self.limit_check, MaterializeResponseLimitCheckType0): + limit_check = self.limit_check.to_dict() + else: + limit_check = self.limit_check + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -52,11 +73,17 @@ def to_dict(self) -> dict[str, Any]: ) if status is not UNSET: field_dict["status"] = status + if limit_check is not UNSET: + field_dict["limit_check"] = limit_check return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.materialize_response_limit_check_type_0 import ( + MaterializeResponseLimitCheckType0, + ) + d = dict(src_dict) graph_id = d.pop("graph_id") @@ -66,11 +93,31 @@ def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: status = d.pop("status", UNSET) + def _parse_limit_check( + data: object, + ) -> MaterializeResponseLimitCheckType0 | None | Unset: + if data is None: + return data + if isinstance(data, Unset): + return data + try: + if not isinstance(data, dict): + raise TypeError() + limit_check_type_0 = MaterializeResponseLimitCheckType0.from_dict(data) + + return limit_check_type_0 + except (TypeError, ValueError, AttributeError, KeyError): + pass + return cast(MaterializeResponseLimitCheckType0 | None | Unset, data) + + limit_check = _parse_limit_check(d.pop("limit_check", UNSET)) + materialize_response = cls( graph_id=graph_id, operation_id=operation_id, message=message, status=status, + limit_check=limit_check, ) materialize_response.additional_properties = d diff --git a/robosystems_client/models/storage_info_overage_pricing.py b/robosystems_client/models/materialize_response_limit_check_type_0.py similarity index 64% rename from robosystems_client/models/storage_info_overage_pricing.py rename to robosystems_client/models/materialize_response_limit_check_type_0.py index 294eb4f..5d34fd7 100644 --- a/robosystems_client/models/storage_info_overage_pricing.py +++ b/robosystems_client/models/materialize_response_limit_check_type_0.py @@ -6,14 +6,14 @@ from attrs import define as _attrs_define from attrs import field as _attrs_field -T = TypeVar("T", bound="StorageInfoOveragePricing") +T = TypeVar("T", bound="MaterializeResponseLimitCheckType0") @_attrs_define -class StorageInfoOveragePricing: - """Overage pricing per GB per tier""" +class MaterializeResponseLimitCheckType0: + """ """ - additional_properties: dict[str, float] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: field_dict: dict[str, Any] = {} @@ -24,19 +24,19 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - storage_info_overage_pricing = cls() + 
materialize_response_limit_check_type_0 = cls() - storage_info_overage_pricing.additional_properties = d - return storage_info_overage_pricing + materialize_response_limit_check_type_0.additional_properties = d + return materialize_response_limit_check_type_0 @property def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> float: + def __getitem__(self, key: str) -> Any: return self.additional_properties[key] - def __setitem__(self, key: str, value: float) -> None: + def __setitem__(self, key: str, value: Any) -> None: self.additional_properties[key] = value def __delitem__(self, key: str) -> None: diff --git a/robosystems_client/models/storage_info.py b/robosystems_client/models/storage_info.py index 801c6e0..82527b6 100644 --- a/robosystems_client/models/storage_info.py +++ b/robosystems_client/models/storage_info.py @@ -1,61 +1,45 @@ from __future__ import annotations from collections.abc import Mapping -from typing import TYPE_CHECKING, Any, TypeVar +from typing import Any, TypeVar from attrs import define as _attrs_define from attrs import field as _attrs_field -if TYPE_CHECKING: - from ..models.storage_info_included_per_tier import StorageInfoIncludedPerTier - from ..models.storage_info_overage_pricing import StorageInfoOveragePricing - +from ..types import UNSET, Unset T = TypeVar("T", bound="StorageInfo") @_attrs_define class StorageInfo: - """Storage pricing information. + """Storage information. Attributes: - included_per_tier (StorageInfoIncludedPerTier): Storage included per tier in GB - overage_pricing (StorageInfoOveragePricing): Overage pricing per GB per tier + description (str | Unset): Storage billing description Default: 'Storage included in tier'. """ - included_per_tier: StorageInfoIncludedPerTier - overage_pricing: StorageInfoOveragePricing + description: str | Unset = "Storage included in tier" additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: - included_per_tier = self.included_per_tier.to_dict() - - overage_pricing = self.overage_pricing.to_dict() + description = self.description field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) - field_dict.update( - { - "included_per_tier": included_per_tier, - "overage_pricing": overage_pricing, - } - ) + field_dict.update({}) + if description is not UNSET: + field_dict["description"] = description return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: - from ..models.storage_info_included_per_tier import StorageInfoIncludedPerTier - from ..models.storage_info_overage_pricing import StorageInfoOveragePricing - d = dict(src_dict) - included_per_tier = StorageInfoIncludedPerTier.from_dict(d.pop("included_per_tier")) - - overage_pricing = StorageInfoOveragePricing.from_dict(d.pop("overage_pricing")) + description = d.pop("description", UNSET) storage_info = cls( - included_per_tier=included_per_tier, - overage_pricing=overage_pricing, + description=description, ) storage_info.additional_properties = d diff --git a/robosystems_client/models/validation_error.py b/robosystems_client/models/validation_error.py index 73565dd..c88f7ed 100644 --- a/robosystems_client/models/validation_error.py +++ b/robosystems_client/models/validation_error.py @@ -1,11 +1,17 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any, TypeVar, cast +from typing import TYPE_CHECKING, Any, TypeVar, cast from attrs import 
define as _attrs_define from attrs import field as _attrs_field +from ..types import UNSET, Unset + +if TYPE_CHECKING: + from ..models.validation_error_context import ValidationErrorContext + + T = TypeVar("T", bound="ValidationError") @@ -16,11 +22,15 @@ class ValidationError: loc (list[int | str]): msg (str): type_ (str): + input_ (Any | Unset): + ctx (ValidationErrorContext | Unset): """ loc: list[int | str] msg: str type_: str + input_: Any | Unset = UNSET + ctx: ValidationErrorContext | Unset = UNSET additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: @@ -34,6 +44,12 @@ def to_dict(self) -> dict[str, Any]: type_ = self.type_ + input_ = self.input_ + + ctx: dict[str, Any] | Unset = UNSET + if not isinstance(self.ctx, Unset): + ctx = self.ctx.to_dict() + field_dict: dict[str, Any] = {} field_dict.update(self.additional_properties) field_dict.update( @@ -43,11 +59,17 @@ def to_dict(self) -> dict[str, Any]: "type": type_, } ) + if input_ is not UNSET: + field_dict["input"] = input_ + if ctx is not UNSET: + field_dict["ctx"] = ctx return field_dict @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: + from ..models.validation_error_context import ValidationErrorContext + d = dict(src_dict) loc = [] _loc = d.pop("loc") @@ -64,10 +86,21 @@ def _parse_loc_item(data: object) -> int | str: type_ = d.pop("type") + input_ = d.pop("input", UNSET) + + _ctx = d.pop("ctx", UNSET) + ctx: ValidationErrorContext | Unset + if isinstance(_ctx, Unset): + ctx = UNSET + else: + ctx = ValidationErrorContext.from_dict(_ctx) + validation_error = cls( loc=loc, msg=msg, type_=type_, + input_=input_, + ctx=ctx, ) validation_error.additional_properties = d diff --git a/robosystems_client/models/storage_info_included_per_tier.py b/robosystems_client/models/validation_error_context.py similarity index 66% rename from robosystems_client/models/storage_info_included_per_tier.py rename to robosystems_client/models/validation_error_context.py index cd081aa..dd95089 100644 --- a/robosystems_client/models/storage_info_included_per_tier.py +++ b/robosystems_client/models/validation_error_context.py @@ -6,14 +6,14 @@ from attrs import define as _attrs_define from attrs import field as _attrs_field -T = TypeVar("T", bound="StorageInfoIncludedPerTier") +T = TypeVar("T", bound="ValidationErrorContext") @_attrs_define -class StorageInfoIncludedPerTier: - """Storage included per tier in GB""" +class ValidationErrorContext: + """ """ - additional_properties: dict[str, int] = _attrs_field(init=False, factory=dict) + additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict) def to_dict(self) -> dict[str, Any]: field_dict: dict[str, Any] = {} @@ -24,19 +24,19 @@ def to_dict(self) -> dict[str, Any]: @classmethod def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T: d = dict(src_dict) - storage_info_included_per_tier = cls() + validation_error_context = cls() - storage_info_included_per_tier.additional_properties = d - return storage_info_included_per_tier + validation_error_context.additional_properties = d + return validation_error_context @property def additional_keys(self) -> list[str]: return list(self.additional_properties.keys()) - def __getitem__(self, key: str) -> int: + def __getitem__(self, key: str) -> Any: return self.additional_properties[key] - def __setitem__(self, key: str, value: int) -> None: + def __setitem__(self, key: str, value: Any) -> None: self.additional_properties[key] = value def 
__delitem__(self, key: str) -> None: