From 3f1c131435a64da56c4188236f84f76718baa257 Mon Sep 17 00:00:00 2001 From: Xinran Date: Thu, 9 Apr 2026 09:03:01 +0000 Subject: [PATCH 01/10] add websocket SDK --- .../CHANGELOG.md | 15 + .../azure-ai-agentserver-websocket/LICENSE | 21 + .../MANIFEST.in | 8 + .../azure-ai-agentserver-websocket/README.md | 292 ++++++++ .../azure/__init__.py | 1 + .../azure/ai/__init__.py | 1 + .../azure/ai/agentserver/__init__.py | 1 + .../ai/agentserver/invocations/__init__.py | 28 + .../ai/agentserver/invocations/_constants.py | 34 + .../ai/agentserver/invocations/_invocation.py | 660 ++++++++++++++++++ .../ai/agentserver/invocations/_version.py | 5 + .../azure/ai/agentserver/invocations/py.typed | 0 .../cspell.json | 26 + .../dev_requirements.txt | 7 + .../pyproject.toml | 68 ++ .../pyrightconfig.json | 11 + .../samples/browser_client/index.html | 191 +++++ .../browser_client/serve_browser_client.py | 31 + .../streaming_echo_agent/.dockerignore | 6 + .../samples/streaming_echo_agent/Dockerfile | 21 + .../samples/streaming_echo_agent/README.md | 68 ++ .../streaming_echo_agent/agent.manifest.yaml | 16 + .../samples/streaming_echo_agent/agent.yaml | 8 + .../samples/streaming_echo_agent/main.py | 71 ++ .../streaming_echo_agent/requirements.txt | 2 + .../serve_browser_client.py | 31 + .../streaming_invoke_agent/requirements.txt | 2 + .../streaming_invoke_agent.py | 69 ++ .../tests/conftest.py | 192 +++++ .../tests/test_decorator_pattern.py | 209 ++++++ .../tests/test_edge_cases.py | 232 ++++++ .../tests/test_get_cancel.py | 142 ++++ .../tests/test_graceful_shutdown.py | 211 ++++++ .../tests/test_invoke.py | 133 ++++ .../tests/test_multimodal_protocol.py | 178 +++++ .../tests/test_request_limits.py | 44 ++ .../tests/test_server_routes.py | 104 +++ .../tests/test_session_id.py | 104 +++ .../tests/test_span_parenting.py | 124 ++++ .../tests/test_tracing.py | 356 ++++++++++ .../tests/test_ws_keepalive.py | 89 +++ 41 files changed, 3812 insertions(+) create mode 100644 
sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/LICENSE create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/README.md create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_version.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/py.typed create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/cspell.json create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md create mode 100644 
sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py create mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py diff --git 
a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md new file mode 100644 index 000000000000..1cb00d1154d0 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md @@ -0,0 +1,15 @@ +# Release History + +## 1.0.0b1 (Unreleased) + +### Features Added + +- Initial release of `azure-ai-agentserver-invocations`. +- `InvocationHandler` for wiring invocation protocol endpoints to an `AgentHost`. +- Decorator-based handler registration (`@invocations.invoke_handler`). +- Optional `GET /invocations/{id}` and `POST /invocations/{id}/cancel` endpoints. +- `GET /invocations/docs/openapi.json` for OpenAPI spec serving. +- Invocation ID tracking and session correlation via `agent_session_id` query parameter. +- Distributed tracing with GenAI semantic convention span attributes. +- W3C Baggage propagation for cross-service correlation. +- Streaming response support with span lifecycle management. diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/LICENSE b/sdk/agentserver/azure-ai-agentserver-websocket/LICENSE new file mode 100644 index 000000000000..4c3581d3b052 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in new file mode 100644 index 000000000000..cd83a6c13bfa --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in @@ -0,0 +1,8 @@ +include *.md +include LICENSE +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/ai/__init__.py +include azure/ai/agentserver/__init__.py +include azure/ai/agentserver/invocations/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/README.md new file mode 100644 index 000000000000..9d296974fa7c --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/README.md @@ -0,0 +1,292 @@ +# Azure AI AgentServerHost Invocations for Python (WebSocket) + +The `azure-ai-agentserver-invocations` package provides the invocation protocol over **WebSocket long connections** for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and exposes a single WebSocket endpoint at `/invocations/ws` that supports invoke, get, cancel, and streaming operations. + +## Getting started + +### Install the package + +```bash +pip install azure-ai-agentserver-invocations +``` + +This automatically installs `azure-ai-agentserver-core` as a dependency. 
+ +### Prerequisites + +- Python 3.10 or later + +## Key concepts + +### InvocationAgentServerHost + +`InvocationAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the invocation protocol. It provides decorator methods for registering handler functions: + +- `@app.invoke_handler` — **Required.** Handles `invoke` actions. Supports both async functions (non-streaming) and async generators (streaming). +- `@app.get_invocation_handler` — Optional. Handles `get_invocation` actions. +- `@app.cancel_invocation_handler` — Optional. Handles `cancel_invocation` actions. + +### InvocationContext + +Handler functions receive an `InvocationContext` object containing: + +- `context.invocation_id` — The invocation ID (echoed from client or auto-generated UUID). +- `context.session_id` — The resolved session ID. + +### InvocationError + +Handlers can raise `InvocationError(code, message)` to return a domain-specific error to the client without exposing internal details. 
+ +### WebSocket endpoint + +All invocation operations use a single persistent WebSocket connection: + +| Route | Description | +|---|---| +| `ws://host:port/invocations/ws` | WebSocket endpoint for all invocation operations | +| `GET /invocations/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | +| `GET /readiness` | Health check (HTTP) | + +### Client → Server messages + +All messages are JSON text frames with an `action` field: + +```json +{"action": "invoke", "payload": {...}, "invocation_id": "optional", "session_id": "optional"} +{"action": "get_invocation", "invocation_id": "required"} +{"action": "cancel_invocation", "invocation_id": "required"} +{"action": "ping"} +{"action": "pong"} +``` + +### Server → Client messages + +```json +{"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_end", "invocation_id": "...", "session_id": "..."} +{"type": "error", "invocation_id": "...", "error": {"code": "...", "message": "..."}} +{"type": "ping"} +{"type": "pong"} +``` + +### WebSocket keep-alive (ping/pong) + +Azure APIM and Azure Load Balancer silently drop idle WebSocket connections after approximately 4 minutes, even though the backend supports 60-minute connections. To prevent this, the server sends periodic `{"type": "ping"}` messages to each connected client. + +- **Default interval**: 30 seconds (well within the ~4-minute idle timeout). +- **Disable**: Pass `ws_ping_interval=0` to `InvocationAgentServerHost()`. +- **Custom interval**: Pass any positive integer, e.g. `ws_ping_interval=15`. + +Clients should respond with `{"action": "pong"}` when they receive a `{"type": "ping"}` message. Clients may also send `{"action": "ping"}` at any time; the server replies with `{"type": "pong"}`. 
+ +```python +app = InvocationAgentServerHost(ws_ping_interval=20) # ping every 20 seconds +``` + +### Session ID resolution + +Session IDs group related invocations into a conversation. The SDK resolves the session ID in order: + +1. `session_id` field in the WebSocket message +2. `FOUNDRY_AGENT_SESSION_ID` environment variable +3. Auto-generated UUID + +### Distributed tracing + +When tracing is enabled on the `AgentServerHost`, invocation spans are automatically created with GenAI semantic conventions: + +- **Span name**: `invoke_agent {FOUNDRY_AGENT_NAME}:{FOUNDRY_AGENT_VERSION}` +- **Span attributes**: `gen_ai.system`, `gen_ai.operation.name`, `gen_ai.response.id`, `gen_ai.conversation.id`, `gen_ai.agent.id`, `gen_ai.agent.name`, `gen_ai.agent.version` +- **Error tags**: `azure.ai.agentserver.invocations.error.code`, `.error.message` + +## Examples + +### Simple agent + +```python +from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext + +app = InvocationAgentServerHost() + + +@app.invoke_handler +async def handle(payload: dict, context: InvocationContext) -> dict: + return {"greeting": f"Hello, {payload['name']}!"} + +app.run() +``` + +**Client** (using the `websockets` library): + +```python +import asyncio, json, websockets + +async def main(): + async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + await ws.send(json.dumps({ + "action": "invoke", + "payload": {"name": "Alice"} + })) + while True: + msg = json.loads(await ws.recv()) + if msg["type"] == "ping": + await ws.send(json.dumps({"action": "pong"})) + elif msg["type"] == "result": + print(msg["payload"]["greeting"]) # Hello, Alice! 
+ break + +asyncio.run(main()) +``` + +### Long-running operations with get/cancel + +```python +import asyncio + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, + InvocationError, +) + +_tasks: dict[str, asyncio.Task] = {} +_results: dict[str, dict] = {} + +app = InvocationAgentServerHost() + + +@app.invoke_handler +async def handle(payload: dict, context: InvocationContext) -> dict: + task = asyncio.create_task(do_work(context.invocation_id, payload)) + _tasks[context.invocation_id] = task + return {"invocation_id": context.invocation_id, "status": "running"} + + +@app.get_invocation_handler +async def get_invocation(context: InvocationContext) -> dict: + if context.invocation_id in _results: + return _results[context.invocation_id] + if context.invocation_id in _tasks: + return {"invocation_id": context.invocation_id, "status": "running"} + raise InvocationError("not_found", "Invocation not found") + + +@app.cancel_invocation_handler +async def cancel_invocation(context: InvocationContext) -> dict: + if context.invocation_id in _tasks: + _tasks[context.invocation_id].cancel() + del _tasks[context.invocation_id] + return {"invocation_id": context.invocation_id, "status": "cancelled"} + raise InvocationError("not_found", "Invocation not found") +``` + +### Streaming + +Use an async generator to stream chunks back to the client. Each yielded dict is sent as a `stream_chunk` message, followed by a `stream_end` when the generator completes. 
+ +```python +from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext + +app = InvocationAgentServerHost() + + +@app.invoke_handler +async def handle(payload: dict, context: InvocationContext): + for word in ["Hello", " ", "world", "!"]: + yield {"delta": word} +``` + +**Client**: + +```python +import asyncio, json, websockets + +async def main(): + async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + await ws.send(json.dumps({"action": "invoke", "payload": {}})) + while True: + msg = json.loads(await ws.recv()) + if msg["type"] == "stream_chunk": + print(msg["payload"]["delta"], end="", flush=True) + elif msg["type"] == "stream_end": + print("\nDone!") + break + elif msg["type"] == "ping": + await ws.send(json.dumps({"action": "pong"})) + +asyncio.run(main()) +``` + +### Multi-turn conversation + +Use the `session_id` field to group invocations into a conversation over the same WebSocket connection: + +```python +import asyncio, json, websockets + +async def main(): + async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + # First turn + await ws.send(json.dumps({ + "action": "invoke", + "session_id": "session-abc", + "payload": {"message": "My name is Alice"}, + })) + print(json.loads(await ws.recv())) + + # Second turn (same session, same connection) + await ws.send(json.dumps({ + "action": "invoke", + "session_id": "session-abc", + "payload": {"message": "What is my name?"}, + })) + print(json.loads(await ws.recv())) + +asyncio.run(main()) +``` + +### Serving an OpenAPI spec + +Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /invocations/docs/openapi.json`: + +```python +app = InvocationAgentServerHost(openapi_spec={ + "openapi": "3.0.3", + "info": {"title": "My Agent", "version": "1.0.0"}, + "paths": { ... 
}, +}) +``` + +## Troubleshooting + +### Reporting issues + +To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). + +## Next steps + +Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples) folder for complete working examples: + +| Sample | Description | +|---|---| +| [streaming_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/) | Streaming code-generation tokens via WebSocket | + +## Contributing + +This project welcomes contributions and suggestions. Most contributions require +you to agree to a Contributor License Agreement (CLA) declaring that you have +the right to, and actually do, grant us the rights to use your contribution. +For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether +you need to provide a CLA and decorate the PR appropriately (e.g., label, +comment). Simply follow the instructions provided by the bot. You will only +need to do this once across all repos using our CLA. + +This project has adopted the +[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, +see the Code of Conduct FAQ or contact opencode@microsoft.com with any +additional questions or comments. 
+ +[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py new file mode 100644 index 000000000000..8db66d3d0f0f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py new file mode 100644 index 000000000000..da6218812a4b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py @@ -0,0 +1,28 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Invocations protocol for Azure AI Hosted Agents. + +This package provides an invocation protocol host as a subclass of +:class:`~azure.ai.agentserver.core.AgentServerHost`. 
+ +Quick start:: + + from azure.ai.agentserver.invocations import InvocationAgentServerHost + from starlette.responses import JSONResponse + + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(request): + return JSONResponse({"ok": True}) + + app.run() +""" +__path__ = __import__("pkgutil").extend_path(__path__, __name__) + +from ._invocation import InvocationAgentServerHost, InvocationContext, InvocationError +from ._version import VERSION + +__all__ = ["InvocationAgentServerHost", "InvocationContext", "InvocationError"] +__version__ = VERSION diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py new file mode 100644 index 000000000000..5c7fd0804ec4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py @@ -0,0 +1,34 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + + +class InvocationConstants: + """Invocation protocol constants. + + Protocol-specific constants for the WebSocket invocation protocol. 
+ """ + + # WebSocket message types (server → client) + MSG_TYPE_RESULT = "result" + MSG_TYPE_STREAM_CHUNK = "stream_chunk" + MSG_TYPE_STREAM_END = "stream_end" + MSG_TYPE_ERROR = "error" + MSG_TYPE_PING = "ping" + MSG_TYPE_PONG = "pong" + + # WebSocket actions (client → server) + ACTION_INVOKE = "invoke" + ACTION_GET_INVOCATION = "get_invocation" + ACTION_CANCEL_INVOCATION = "cancel_invocation" + ACTION_PING = "ping" + ACTION_PONG = "pong" + + # Keep-alive defaults + DEFAULT_WS_PING_INTERVAL = 30 # seconds + + # Span attribute keys + ATTR_SPAN_INVOCATION_ID = "azure.ai.agentserver.invocations.invocation_id" + ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.invocations.session_id" + ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.invocations.error.code" + ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.invocations.error.message" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py new file mode 100644 index 000000000000..f2bb00137ce9 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py @@ -0,0 +1,660 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Invocation protocol host for Azure AI Hosted Agents (WebSocket). + +Provides the invocation protocol over WebSocket long connections +as a :class:`~azure.ai.agentserver.core.AgentServerHost` subclass. 
+""" +import asyncio +import contextlib +import inspect +import json +import os +import re +import uuid +from collections.abc import AsyncGenerator, Awaitable, Callable # pylint: disable=import-error +from dataclasses import dataclass +from typing import Any, Optional, Union + +from starlette.requests import Request +from starlette.responses import JSONResponse, Response +from starlette.routing import Route, WebSocketRoute +from starlette.websockets import WebSocket, WebSocketDisconnect + +from azure.ai.agentserver.core import ( # pylint: disable=no-name-in-module + AgentServerHost, + get_logger, + Constants, + create_error_response, +) + +from ._constants import InvocationConstants + +logger = get_logger() + +# Maximum length and allowed characters for user-provided IDs (defense in depth). +_MAX_ID_LENGTH = 256 +_VALID_ID_RE = re.compile(r"^[a-zA-Z0-9\-_.:]+$") + + +def _sanitize_id(value: str, fallback: str) -> str: + """Validate a user-provided ID string. + + Returns *value* unchanged when it passes validation, otherwise returns + *fallback*. This prevents excessively long or malformed IDs from + propagating into span attributes and log messages. + + :param value: The raw ID from a message field. + :type value: str + :param fallback: A safe fallback value (typically a generated UUID). + :type fallback: str + :return: The validated ID or the fallback. + :rtype: str + """ + if not value or len(value) > _MAX_ID_LENGTH or not _VALID_ID_RE.match(value): + return fallback + return value + + +@dataclass +class InvocationContext: + """Contextual information for an invocation request. + + Passed to handler functions registered via :meth:`invoke_handler`, + :meth:`get_invocation_handler`, and :meth:`cancel_invocation_handler`. + + :param invocation_id: Unique identifier for this invocation. + :type invocation_id: str + :param session_id: Session identifier for this invocation. 
+ :type session_id: str + """ + + invocation_id: str + session_id: str + + +class InvocationError(Exception): + """Raised by handlers to signal a domain-specific error. + + :param code: Machine-readable error code. + :type code: str + :param message: Human-readable error message. + :type message: str + """ + + def __init__(self, code: str, message: str) -> None: + self.code = code + self.message = message + super().__init__(message) + + +class InvocationAgentServerHost(AgentServerHost): + """Invocation protocol host for Azure AI Hosted Agents over WebSocket. + + A :class:`~azure.ai.agentserver.core.AgentServerHost` subclass that adds + a WebSocket endpoint for the invocation protocol. Use the decorator + methods to wire handler functions to messages. + + WebSocket endpoint: ``/invocations/ws`` + + **Client → Server messages** (JSON text frames):: + + {"action": "invoke", "invocation_id": "opt", "session_id": "opt", "payload": {...}} + {"action": "get_invocation", "invocation_id": "required"} + {"action": "cancel_invocation", "invocation_id": "required"} + + **Server → Client messages** (JSON text frames):: + + {"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} + {"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} + {"type": "stream_end", "invocation_id": "...", "session_id": "..."} + {"type": "error", "invocation_id": "...", "error": {"code": "...", "message": "..."}} + + Usage:: + + from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext + + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload, context): + return {"reply": "hello"} + + app.run() + + :param openapi_spec: Optional OpenAPI spec dict. When provided, the spec + is served at ``GET /invocations/docs/openapi.json``. 
+ :type openapi_spec: Optional[dict[str, Any]] + :param ws_ping_interval: Interval in seconds between keep-alive ping + frames sent to each connected WebSocket client. Keeps the + connection alive through Azure APIM / Load Balancer which silently + drop idle connections after ~4 minutes. Set to ``0`` to disable. + Defaults to ``30``. + :type ws_ping_interval: Optional[int] + """ + + def __init__( + self, + *, + openapi_spec: Optional[dict[str, Any]] = None, + ws_ping_interval: Optional[int] = None, + **kwargs: Any, + ) -> None: + self._invoke_fn: Optional[Callable] = None + self._get_invocation_fn: Optional[Callable] = None + self._cancel_invocation_fn: Optional[Callable] = None + self._openapi_spec = openapi_spec + self._ws_ping_interval: int = ( + ws_ping_interval + if ws_ping_interval is not None + else InvocationConstants.DEFAULT_WS_PING_INTERVAL + ) + + # Build invocation routes + invocation_routes: list[Any] = [ + Route( + "/invocations/docs/openapi.json", + self._get_openapi_spec_endpoint, + methods=["GET"], + name="get_openapi_spec", + ), + WebSocketRoute( + "/invocations/ws", + self._websocket_endpoint, + name="invocations_ws", + ), + ] + + # Merge with any routes from sibling mixins via cooperative init + existing = list(kwargs.pop("routes", None) or []) + super().__init__(routes=existing + invocation_routes, **kwargs) + + # ------------------------------------------------------------------ + # Handler decorators + # ------------------------------------------------------------------ + + def invoke_handler( + self, fn: Callable[..., Any] + ) -> Callable[..., Any]: + """Register a function as the invoke handler. + + The handler receives ``(payload: dict, context: InvocationContext)`` + and may be: + + - An async function returning a ``dict`` (non-streaming). + - An async generator yielding ``dict`` chunks (streaming). 
+ + Usage:: + + @app.invoke_handler + async def handle(payload, context): + return {"reply": f"echo: {payload}"} + + # Streaming variant: + @app.invoke_handler + async def handle(payload, context): + for token in tokens: + yield {"token": token} + + :param fn: Async function or async generator function. + :type fn: Callable + :return: The original function (unmodified). + :rtype: Callable + :raises TypeError: If *fn* is not async. + """ + if not (inspect.iscoroutinefunction(fn) or inspect.isasyncgenfunction(fn)): + raise TypeError( + f"invoke_handler expects an async function or async generator, got {type(fn).__name__}. " + "Use 'async def' to define your handler." + ) + self._invoke_fn = fn + return fn + + def get_invocation_handler( + self, fn: Callable[..., Any] + ) -> Callable[..., Any]: + """Register a function as the get-invocation handler. + + The handler receives ``(context: InvocationContext)`` and returns + a ``dict``. + + :param fn: Async function. + :type fn: Callable + :return: The original function (unmodified). + :rtype: Callable + :raises TypeError: If *fn* is not an async function. + """ + if not inspect.iscoroutinefunction(fn): + raise TypeError( + f"get_invocation_handler expects an async function, got {type(fn).__name__}. " + "Use 'async def' to define your handler." + ) + self._get_invocation_fn = fn + return fn + + def cancel_invocation_handler( + self, fn: Callable[..., Any] + ) -> Callable[..., Any]: + """Register a function as the cancel-invocation handler. + + The handler receives ``(context: InvocationContext)`` and returns + a ``dict``. + + :param fn: Async function. + :type fn: Callable + :return: The original function (unmodified). + :rtype: Callable + :raises TypeError: If *fn* is not an async function. + """ + if not inspect.iscoroutinefunction(fn): + raise TypeError( + f"cancel_invocation_handler expects an async function, got {type(fn).__name__}. " + "Use 'async def' to define your handler." 
+ ) + self._cancel_invocation_fn = fn + return fn + + # ------------------------------------------------------------------ + # OpenAPI spec (HTTP endpoint — documentation) + # ------------------------------------------------------------------ + + def get_openapi_spec(self) -> Optional[dict[str, Any]]: + """Return the stored OpenAPI spec, or None.""" + return self._openapi_spec + + async def _get_openapi_spec_endpoint(self, request: Request) -> Response: # pylint: disable=unused-argument + spec = self.get_openapi_spec() + if spec is None: + return create_error_response("not_found", "No OpenAPI spec registered", status_code=404) + return JSONResponse(spec) + + # ------------------------------------------------------------------ + # Span attribute helper + # ------------------------------------------------------------------ + + @staticmethod + def _safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: + if span is None: + return + try: + for key, value in attrs.items(): + span.set_attribute(key, value) + except Exception: # pylint: disable=broad-exception-caught + logger.debug("Failed to set span attributes: %s", list(attrs.keys()), exc_info=True) + + # ------------------------------------------------------------------ + # Span context manager + # ------------------------------------------------------------------ + + def _request_span( + self, + headers: Any, + invocation_id: str, + span_operation: str, + operation_name: Optional[str] = None, + session_id: str = "", + ) -> Any: + """Create a request span — returns a no-op context manager when tracing is off. + + :param headers: HTTP/WebSocket handshake headers. + :type headers: any + :param invocation_id: The request/invocation ID. + :type invocation_id: str + :param span_operation: Span operation name. + :type span_operation: str + :param operation_name: Optional ``gen_ai.operation.name`` value. + :type operation_name: str or None + :param session_id: Session ID (empty string if absent). 
+ :type session_id: str + :return: Context manager yielding the OTel span or *None*. + :rtype: any + """ + if self._tracing is not None: + return self._tracing.request_span( + headers, invocation_id, span_operation, + operation_name=operation_name, session_id=session_id, + end_on_exit=False, + ) + return contextlib.nullcontext(None) + + def _simple_request_span( + self, + headers: Any, + invocation_id: str, + span_operation: str, + session_id: str = "", + ) -> Any: + """Create a request span that auto-ends on exit. + + Used for get/cancel operations that don't need manual span lifecycle. + + :param headers: HTTP/WebSocket handshake headers. + :type headers: any + :param invocation_id: The request/invocation ID. + :type invocation_id: str + :param span_operation: Span operation name. + :type span_operation: str + :param session_id: Session ID (empty string if absent). + :type session_id: str + :return: Context manager yielding the OTel span or *None*. + :rtype: any + """ + if self._tracing is not None: + return self._tracing.request_span( + headers, invocation_id, span_operation, + session_id=session_id, + ) + return contextlib.nullcontext(None) + + # ------------------------------------------------------------------ + # WebSocket endpoint + # ------------------------------------------------------------------ + + async def _ws_ping_loop(self, websocket: WebSocket) -> None: + """Send periodic ping frames to keep the WebSocket alive. + + Azure APIM and Azure Load Balancer silently kill idle WebSocket + connections after ~4 minutes. This background task sends a + lightweight ``{"type": "ping"}`` message at a configurable + interval (default 30 s) so the connection is never considered idle. + + :param websocket: The WebSocket connection to keep alive. 
+ :type websocket: ~starlette.websockets.WebSocket + """ + try: + while True: + await asyncio.sleep(self._ws_ping_interval) + await websocket.send_json({"type": InvocationConstants.MSG_TYPE_PING}) + except (WebSocketDisconnect, Exception): # pylint: disable=broad-exception-caught + # Connection closed or errored — let the task exit silently. + pass + + async def _websocket_endpoint(self, websocket: WebSocket) -> None: + """Main WebSocket endpoint for the invocation protocol. + + Accepts a WebSocket connection and processes JSON messages in a loop. + Each message must contain an ``action`` field. + + A background keep-alive task sends periodic ping messages to prevent + Azure APIM / Load Balancer from dropping idle connections. + + :param websocket: The WebSocket connection. + :type websocket: ~starlette.websockets.WebSocket + """ + await websocket.accept() + + # Start keep-alive ping task (disabled when interval is 0). + ping_task: Optional[asyncio.Task] = None + if self._ws_ping_interval > 0: + ping_task = asyncio.create_task(self._ws_ping_loop(websocket)) + + try: + while True: + raw = await websocket.receive_text() + + try: + message = json.loads(raw) + except (json.JSONDecodeError, ValueError): + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_json", "message": "Invalid JSON message"}, + }) + continue + + if not isinstance(message, dict): + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_message", "message": "Message must be a JSON object"}, + }) + continue + + action = message.get("action") + if action == InvocationConstants.ACTION_INVOKE: + await self._handle_ws_invoke(websocket, message) + elif action == InvocationConstants.ACTION_GET_INVOCATION: + await self._handle_ws_get_invocation(websocket, message) + elif action == InvocationConstants.ACTION_CANCEL_INVOCATION: + await self._handle_ws_cancel_invocation(websocket, message) + elif action == 
InvocationConstants.ACTION_PING: + # Client-initiated ping — respond with pong. + await websocket.send_json({"type": InvocationConstants.MSG_TYPE_PONG}) + elif action == InvocationConstants.ACTION_PONG: + # Client pong response — no-op, already kept connection alive. + pass + else: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "error": { + "code": "invalid_action", + "message": f"Unknown action: {action}", + }, + }) + except WebSocketDisconnect: + logger.debug("WebSocket client disconnected") + except Exception: # pylint: disable=broad-exception-caught + logger.exception("Unexpected WebSocket error") + finally: + if ping_task is not None: + ping_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await ping_task + + # ------------------------------------------------------------------ + # Invoke handler + # ------------------------------------------------------------------ + + async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) -> None: + generated_id = str(uuid.uuid4()) + raw_invocation_id = message.get("invocation_id") or "" + invocation_id = _sanitize_id(raw_invocation_id, generated_id) + + raw_session_id = ( + message.get("session_id") + or os.environ.get(Constants.FOUNDRY_AGENT_SESSION_ID) + or "" + ) + session_id = _sanitize_id(raw_session_id, str(uuid.uuid4())) + + context = InvocationContext(invocation_id=invocation_id, session_id=session_id) + payload = message.get("payload", {}) + + if self._invoke_fn is None: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "session_id": session_id, + "error": { + "code": "not_implemented", + "message": "No invoke handler registered. 
Use the @app.invoke_handler decorator.", + }, + }) + return + + with self._request_span( + websocket.headers, invocation_id, "invoke_agent", + operation_name="invoke_agent", session_id=session_id, + ) as otel_span: + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + }) + + try: + if inspect.isasyncgenfunction(self._invoke_fn): + # Streaming response + async for chunk in self._invoke_fn(payload, context): + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_STREAM_CHUNK, + "invocation_id": invocation_id, + "session_id": session_id, + "payload": chunk, + }) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_STREAM_END, + "invocation_id": invocation_id, + "session_id": session_id, + }) + else: + # Non-streaming response + result = await self._invoke_fn(payload, context) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_RESULT, + "invocation_id": invocation_id, + "session_id": session_id, + "payload": result, + }) + except InvocationError as exc: + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: exc.code, + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, + }) + if self._tracing is not None: + self._tracing.end_span(otel_span, exc=exc) + logger.error("Invocation %s failed: %s", invocation_id, exc) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "session_id": session_id, + "error": {"code": exc.code, "message": exc.message}, + }) + return + except Exception as exc: # pylint: disable=broad-exception-caught + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + }) + if self._tracing is not None: + self._tracing.end_span(otel_span, exc=exc) + logger.error("Error processing invocation %s: %s", invocation_id, exc, 
exc_info=True) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "session_id": session_id, + "error": {"code": "internal_error", "message": "Internal server error"}, + }) + return + + # Success — end span + if self._tracing is not None: + self._tracing.end_span(otel_span) + + # ------------------------------------------------------------------ + # Get-invocation handler + # ------------------------------------------------------------------ + + async def _handle_ws_get_invocation(self, websocket: WebSocket, message: dict[str, Any]) -> None: + invocation_id = message.get("invocation_id") or "" + if not invocation_id: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "invocation_id is required"}, + }) + return + + session_id = message.get("session_id") or "" + context = InvocationContext(invocation_id=invocation_id, session_id=session_id) + + with self._simple_request_span( + websocket.headers, invocation_id, "get_invocation", + session_id=session_id, + ) as otel_span: + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + }) + + if self._get_invocation_fn is None: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": "not_found", "message": "get_invocation not implemented"}, + }) + return + + try: + result = await self._get_invocation_fn(context) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_RESULT, + "invocation_id": invocation_id, + "payload": result, + }) + except InvocationError as exc: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": exc.code, "message": exc.message}, + }) + except Exception as exc: # pylint: disable=broad-exception-caught + 
self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + }) + if self._tracing is not None: + self._tracing.record_error(otel_span, exc) + logger.error("Error in get_invocation %s: %s", invocation_id, exc, exc_info=True) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": "internal_error", "message": "Internal server error"}, + }) + + # ------------------------------------------------------------------ + # Cancel-invocation handler + # ------------------------------------------------------------------ + + async def _handle_ws_cancel_invocation(self, websocket: WebSocket, message: dict[str, Any]) -> None: + invocation_id = message.get("invocation_id") or "" + if not invocation_id: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "invocation_id is required"}, + }) + return + + session_id = message.get("session_id") or "" + context = InvocationContext(invocation_id=invocation_id, session_id=session_id) + + with self._simple_request_span( + websocket.headers, invocation_id, "cancel_invocation", + session_id=session_id, + ) as otel_span: + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + }) + + if self._cancel_invocation_fn is None: + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": "not_found", "message": "cancel_invocation not implemented"}, + }) + return + + try: + result = await self._cancel_invocation_fn(context) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_RESULT, + "invocation_id": invocation_id, + "payload": result, + }) + except InvocationError as exc: + await websocket.send_json({ + "type": 
InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": exc.code, "message": exc.message}, + }) + except Exception as exc: # pylint: disable=broad-exception-caught + self._safe_set_attrs(otel_span, { + InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + }) + if self._tracing is not None: + self._tracing.record_error(otel_span, exc) + logger.error("Error in cancel_invocation %s: %s", invocation_id, exc, exc_info=True) + await websocket.send_json({ + "type": InvocationConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": "internal_error", "message": "Internal server error"}, + }) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_version.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_version.py new file mode 100644 index 000000000000..67d209a8cafd --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_version.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/py.typed b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json new file mode 100644 index 000000000000..5858cd8e195b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json @@ -0,0 +1,26 @@ +{ + "ignoreWords": [ + "agentserver", + "appinsights", + "ASGI", + "autouse", + "caplog", + "genai", + "hypercorn", + "invocations", + "openapi", + "paramtype", + "pytestmark", + "rtype", + "starlette", + "traceparent", + "tracestate", + "tracecontext" + ], + "ignorePaths": [ + "*.csv", + "*.json", + "*.rst", + "samples/**" + ] +} diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt new file mode 100644 index 000000000000..e7af80133df7 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt @@ -0,0 +1,7 @@ +-e ../../../eng/tools/azure-sdk-tools +../azure-ai-agentserver-core +pytest +httpx +pytest-asyncio +opentelemetry-api>=1.20.0 +opentelemetry-sdk>=1.20.0 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml new file mode 100644 index 000000000000..2427a5757164 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml @@ -0,0 +1,68 @@ +[project] +name = "azure-ai-agentserver-invocations" +dynamic = ["version", "readme"] +description = "Invocations protocol for Azure AI Hosted Agents" +requires-python = ">=3.10" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +license = "MIT" 
+classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", +] +keywords = ["azure", "azure sdk", "agent", "agentserver", "invocations"] + +dependencies = [ + "azure-ai-agentserver-core>=2.0.0b1", +] + +[build-system] +requires = ["setuptools>=69", "wheel"] +build-backend = "setuptools.build_meta" + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "samples*", + "doc*", + "azure", + "azure.ai", + "azure.ai.agentserver", +] + +[tool.setuptools.dynamic] +version = { attr = "azure.ai.agentserver.invocations._version.VERSION" } +readme = { file = ["README.md"], content-type = "text/markdown" } + +[tool.setuptools.package-data] +"azure.ai.agentserver.invocations" = ["py.typed"] + +[tool.ruff] +line-length = 120 +target-version = "py310" +lint.select = ["E", "F", "B", "I"] +lint.ignore = [] +fix = false + +[tool.ruff.lint.isort] +known-first-party = ["azure.ai.agentserver.invocations"] +combine-as-imports = true + +[tool.azure-sdk-build] +breaking = false +mypy = true +pyright = true +verifytypes = true +pylint = true +type_check_samples = false diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json b/sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json new file mode 100644 index 000000000000..f36c5a7fe0d3 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json @@ -0,0 +1,11 @@ +{ + "reportOptionalMemberAccess": "warning", + "reportArgumentType": "warning", + "reportAttributeAccessIssue": "warning", + "reportMissingImports": "warning", + "reportGeneralTypeIssues": "warning", + 
"reportReturnType": "warning", + "exclude": [ + "**/samples/**" + ] +} diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html new file mode 100644 index 000000000000..b05e37ca2944 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html @@ -0,0 +1,191 @@ + + + + + +Echo Agent WebSocket Client + + + + +
+

Echo Agent WebSocket Client

+
+ + + + Disconnected +
+
+ +
+ +
+ + +
+ + + + diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py new file mode 100644 index 000000000000..4bf16cff3b9e --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py @@ -0,0 +1,31 @@ +"""Serve the browser client on a local HTTP port. + +Usage:: + + python serve_browser_client.py + python serve_browser_client.py --port 3000 +""" +import argparse +import http.server +import os +import functools + + +def main() -> None: + parser = argparse.ArgumentParser(description="Serve browser client locally") + parser.add_argument("--port", type=int, default=8080, help="Port to serve on (default: 8080)") + args = parser.parse_args() + + directory = os.path.dirname(os.path.abspath(__file__)) + handler = functools.partial(http.server.SimpleHTTPRequestHandler, directory=directory) + + with http.server.HTTPServer(("", args.port), handler) as httpd: + print(f"Serving browser client at http://localhost:{args.port}") + try: + httpd.serve_forever() + except KeyboardInterrupt: + print("\nStopped.") + + +if __name__ == "__main__": + main() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore new file mode 100644 index 000000000000..0f0d55d2aeca --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore @@ -0,0 +1,6 @@ +.venv +__pycache__ +*.pyc +*.pyo +*.pyd +.Python diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile new file mode 100644 index 000000000000..0893a6584a1d --- /dev/null +++ 
b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile @@ -0,0 +1,21 @@ +FROM python:3.12-slim + +WORKDIR /app + +COPY . user_agent/ +WORKDIR /app/user_agent + +RUN if [ -f requirements.txt ]; then \ + pip install --no-input -r requirements.txt; \ + else \ + echo "No requirements.txt found"; \ + fi + +EXPOSE 8088 + +CMD bash -c '\ + if [ -f /etc/ssl/certs/adc-egress-proxy-ca.crt ]; then \ + cat /etc/ssl/certs/adc-egress-proxy-ca.crt >> /etc/ssl/certs/ca-certificates.crt && \ + cat /etc/ssl/certs/adc-egress-proxy-ca.crt >> $(python -c "import certifi; print(certifi.where())"); \ + fi && \ + python main.py' diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md new file mode 100644 index 000000000000..b42c95bc4447 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md @@ -0,0 +1,68 @@ +**IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. + +# Echo Agent — Invocations Protocol (WebSocket Streaming) + +This sample demonstrates a minimal echo agent built with [azure-ai-agentserver-invocations](https://pypi.org/project/azure-ai-agentserver-invocations/) that streams responses word-by-word using WebSocket. + +## How It Works + +The agent receives user input via the Invocations protocol over WebSocket (`ws://localhost:8088/invocations/ws`) and echoes it back with a `🔊 Echo:` prefix. Each word is streamed as a separate `stream_chunk` message, followed by a final `stream_end` signal. 
+
+## Running Locally
+
+### Prerequisites
+
+- Python 3.10+
+- Azure CLI installed and authenticated (`az login`)
+
+### Install Dependencies
+
+```bash
+pip install -r requirements.txt
+```
+
+### Start the Agent
+
+```bash
+python main.py
+```
+
+The agent starts on `http://localhost:8088/`.
+
+### Test
+
+Using the included browser client (then open http://localhost:8080 in a browser):
+
+```bash
+python serve_browser_client.py
+python serve_browser_client.py --port 3000
+```
+
+Or using the `websockets` library directly:
+
+```python
+import asyncio, json, websockets
+
+async def main():
+    async with websockets.connect("ws://localhost:8088/invocations/ws") as ws:
+        await ws.send(json.dumps({
+            "action": "invoke",
+            "payload": {"message": "Hello world!"}
+        }))
+        while True:
+            msg = json.loads(await ws.recv())
+            if msg["type"] == "stream_chunk":
+                print(msg["payload"]["token"], end=" ", flush=True)
+            elif msg["type"] == "stream_end":
+                print("\nDone!", flush=True)
+                break
+            elif msg["type"] == "error":
+                print(f"Error: {msg['error']}")
+                break
+
+asyncio.run(main())
+```
+
+## Deploying to Microsoft Foundry
+
+To deploy your agent to Microsoft Foundry, follow the deployment guide at https://github.com/microsoft/hosted-agents-vnext-private-preview/blob/main/azd-quickstart.md
diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml
new file mode 100644
index 000000000000..5ad7ba61265b
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml
@@ -0,0 +1,16 @@
+name: echo-agent-invocations-websocket-streaming
+description: >
+  A simple echo agent that streams responses word-by-word using the
+  azure-ai-agentserver-invocations SDK with WebSocket streaming.
+metadata: + tags: + - AI Agent Hosting + - Azure AI AgentServer + - Invocations Protocol + - Streaming +template: + name: echo-agent-invocations-streaming + kind: hosted + protocols: + - protocol: invocations + version: v0.0.1 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml new file mode 100644 index 000000000000..7fe3fd65dca8 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml @@ -0,0 +1,8 @@ +kind: hosted +name: echo-agent-streaming +protocols: + - protocol: invocations + version: v0.0.1 +resources: + cpu: "0.25" + memory: 0.5Gi diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py new file mode 100644 index 000000000000..58ca066e95e7 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py @@ -0,0 +1,71 @@ +# Copyright (c) Microsoft. All rights reserved. + +"""Streaming echo agent using azure-ai-agentserver-invocations (WebSocket). + +Echoes user input back as a WebSocket stream, +sending each word as a separate token chunk. 
+ +**Server** (this file):: + + python main.py + +**Client** (using the ``websockets`` library):: + + import asyncio, json, websockets + + async def main(): + async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + await ws.send(json.dumps({ + "action": "invoke", + "payload": {"message": "Hello world!"} + })) + while True: + msg = json.loads(await ws.recv()) + if msg["type"] == "stream_chunk": + print(msg["payload"]["token"], end=" ", flush=True) + elif msg["type"] == "stream_end": + print("\\nDone!", flush=True) + break + elif msg["type"] == "error": + print(f"Error: {msg['error']}") + break + + asyncio.run(main()) +""" + +import asyncio +from collections.abc import AsyncGenerator + +from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext + +ECHO_PREFIX = "🔊 Echo: " + +app = InvocationAgentServerHost() + + +@app.invoke_handler +async def handle_invoke( + payload: dict, context: InvocationContext # pylint: disable=unused-argument +) -> AsyncGenerator[dict, None]: + """Yield token chunks with simulated latency. + + Each chunk is sent as a WebSocket ``stream_chunk`` message. + A final ``stream_end`` message signals completion (handled by the framework). + + :param payload: The client request payload. + :type payload: dict + :param context: Invocation context with IDs. + :type context: InvocationContext + """ + message = payload.get( + "message", "Hello! 
Send me a message and I'll echo it back.") + echo_text = f"{ECHO_PREFIX}{message}" + words = echo_text.split() + + for word in words: + yield {"token": word} + await asyncio.sleep(0.1) # simulate token-by-token latency + + +if __name__ == "__main__": + app.run() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt new file mode 100644 index 000000000000..5c6c36ed6d78 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt @@ -0,0 +1,2 @@ +azure-ai-agentserver-invocations +websockets \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py new file mode 100644 index 000000000000..cf39ce5ccc15 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py @@ -0,0 +1,31 @@ +"""Serve the browser client on a local HTTP port. 
+ +Usage:: + + python serve_browser_client.py + python serve_browser_client.py --port 3000 +""" +import argparse +import http.server +import os +import functools + + +def main() -> None: + parser = argparse.ArgumentParser(description="Serve browser client locally") + parser.add_argument("--port", type=int, default=8080, help="Port to serve on (default: 8080)") + args = parser.parse_args() + + directory = os.path.dirname(os.path.abspath(__file__)) + handler = functools.partial(http.server.SimpleHTTPRequestHandler, directory=directory) + + with http.server.HTTPServer(("", args.port), handler) as httpd: + print(f"Serving browser client at http://localhost:{args.port}/browser_client.html") + try: + httpd.serve_forever() + except KeyboardInterrupt: + print("\nStopped.") + + +if __name__ == "__main__": + main() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt new file mode 100644 index 000000000000..fe489ac8ac35 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt @@ -0,0 +1,2 @@ +azure-ai-agentserver-invocations +websockets diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py new file mode 100644 index 000000000000..568da5a41165 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py @@ -0,0 +1,69 @@ +"""Streaming invoke agent example (WebSocket). + +Demonstrates returning results incrementally via WebSocket streaming. +Callers receive real-time partial output as tokens are generated. 
+ +**Server** (this file):: + + python streaming_invoke_agent.py + +**Client** (using the ``websockets`` library):: + + import asyncio, json, websockets + + async def main(): + async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + await ws.send(json.dumps({ + "action": "invoke", + "payload": {"prompt": "Write a Calculator class with an Add method"} + })) + while True: + msg = json.loads(await ws.recv()) + if msg["type"] == "stream_chunk": + print(msg["payload"]["token"], end="", flush=True) + elif msg["type"] == "stream_end": + print("\\nDone!", flush=True) + break + elif msg["type"] == "error": + print(f"Error: {msg['error']}") + break + + asyncio.run(main()) +""" +import asyncio +from collections.abc import AsyncGenerator # pylint: disable=import-error + +from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext + + +app = InvocationAgentServerHost() + +# Simulated tokens — in production these would come from a model. +_SIMULATED_TOKENS = [ + "class", " Calculator", ":", "\n", + " ", "def", " add", "(", "self", ",", " a", ",", " b", ")", ":", "\n", + " ", "return", " a", " +", " b", "\n", +] + + +@app.invoke_handler +async def handle_invoke( + payload: dict, context: InvocationContext # pylint: disable=unused-argument +) -> AsyncGenerator[dict, None]: + """Yield token chunks with simulated latency. + + Each chunk is sent as a WebSocket ``stream_chunk`` message. + A final ``stream_end`` message signals completion (handled by the framework). + + :param payload: The client request payload (unused in this demo). + :type payload: dict + :param context: Invocation context with IDs. 
+ :type context: InvocationContext + """ + for token in _SIMULATED_TOKENS: + yield {"token": token} + await asyncio.sleep(0.15) # simulate model latency + + +if __name__ == "__main__": + app.run() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py new file mode 100644 index 000000000000..5fdd0c258b23 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py @@ -0,0 +1,192 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Shared fixtures and factory functions for invocations WebSocket tests.""" +from typing import Any + +import pytest +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, + InvocationError, +) + + +# --------------------------------------------------------------------------- +# Sample OpenAPI spec used by several tests +# --------------------------------------------------------------------------- + +SAMPLE_OPENAPI_SPEC: dict[str, Any] = { + "openapi": "3.0.0", + "info": {"title": "Echo Agent", "version": "1.0.0"}, + "paths": { + "/invocations": { + "post": { + "requestBody": { + "required": True, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": ["message"], + "properties": { + "message": {"type": "string"}, + }, + } + } + }, + }, + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "reply": {"type": "string"}, + }, + } + } + }, + } + }, + } + } + }, +} + + +# --------------------------------------------------------------------------- +# Factory functions +# --------------------------------------------------------------------------- + + +def _make_echo_agent(**kwargs: Any) -> 
InvocationAgentServerHost: + """Create an InvocationAgentServerHost whose invoke handler echoes the payload.""" + app = InvocationAgentServerHost(**kwargs) + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"echo": payload, "invocation_id": context.invocation_id} + + return app + + +def _make_streaming_agent(**kwargs: Any) -> InvocationAgentServerHost: + """Create an InvocationAgentServerHost whose invoke handler yields 3 JSON chunks.""" + app = InvocationAgentServerHost(**kwargs) + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext): + for i in range(3): + yield {"chunk": i} + + return app + + +def _make_async_storage_agent(**kwargs: Any) -> InvocationAgentServerHost: + """Create an InvocationAgentServerHost with get/cancel handlers and in-memory store.""" + app = InvocationAgentServerHost(**kwargs) + store: dict[str, dict] = {} + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + store[context.invocation_id] = payload + return {"stored": True, "invocation_id": context.invocation_id} + + @app.get_invocation_handler + async def get_handler(context: InvocationContext) -> dict: + if context.invocation_id not in store: + raise InvocationError("not_found", "Not found") + return {"data": store[context.invocation_id]} + + @app.cancel_invocation_handler + async def cancel_handler(context: InvocationContext) -> dict: + if context.invocation_id not in store: + raise InvocationError("not_found", "Not found") + del store[context.invocation_id] + return {"status": "cancelled"} + + return app + + +def _make_validated_agent() -> InvocationAgentServerHost: + """Create an InvocationAgentServerHost with OpenAPI spec.""" + app = InvocationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"reply": f"echo: {payload['message']}"} + + return app + + +def 
_make_failing_agent(**kwargs: Any) -> InvocationAgentServerHost: + """Create an InvocationAgentServerHost whose handler raises ValueError.""" + app = InvocationAgentServerHost(**kwargs) + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + raise ValueError("something went wrong") + + return app + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture() +def echo_app(): + return _make_echo_agent() + + +@pytest.fixture() +def echo_client(echo_app): + return TestClient(echo_app) + + +@pytest.fixture() +def streaming_app(): + return _make_streaming_agent() + + +@pytest.fixture() +def streaming_client(streaming_app): + return TestClient(streaming_app) + + +@pytest.fixture() +def async_storage_app(): + return _make_async_storage_agent() + + +@pytest.fixture() +def async_storage_client(async_storage_app): + return TestClient(async_storage_app) + + +@pytest.fixture() +def validated_client(): + app = _make_validated_agent() + return TestClient(app) + + +@pytest.fixture() +def no_spec_client(): + app = _make_echo_agent() + return TestClient(app) + + +@pytest.fixture() +def failing_app(): + return _make_failing_agent() + + +@pytest.fixture() +def failing_client(failing_app): + return TestClient(failing_app) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py new file mode 100644 index 000000000000..62db1498e3f3 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py @@ -0,0 +1,209 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for decorator-based handler registration on InvocationAgentServerHost.""" +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, + InvocationError, +) + + +# --------------------------------------------------------------------------- +# invoke_handler stores function +# --------------------------------------------------------------------------- + +def test_invoke_handler_stores_function(): + """@app.invoke_handler stores the function on the protocol object.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + assert app._invoke_fn is handle + + +# --------------------------------------------------------------------------- +# invoke_handler returns original function +# --------------------------------------------------------------------------- + +def test_invoke_handler_returns_original_function(): + """@app.invoke_handler returns the original function.""" + app = InvocationAgentServerHost() + + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + result = app.invoke_handler(handle) + assert result is handle + + +# --------------------------------------------------------------------------- +# get_invocation_handler stores function +# --------------------------------------------------------------------------- + +def test_get_invocation_handler_stores_function(): + """@app.get_invocation_handler stores the function.""" + app = InvocationAgentServerHost() + + @app.get_invocation_handler + async def get_handler(context: InvocationContext) -> dict: + return {"ok": True} + + assert app._get_invocation_fn is get_handler + + +# --------------------------------------------------------------------------- +# cancel_invocation_handler stores function +# 
--------------------------------------------------------------------------- + +def test_cancel_invocation_handler_stores_function(): + """@app.cancel_invocation_handler stores the function.""" + app = InvocationAgentServerHost() + + @app.cancel_invocation_handler + async def cancel_handler(context: InvocationContext) -> dict: + return {"ok": True} + + assert app._cancel_invocation_fn is cancel_handler + + +# --------------------------------------------------------------------------- +# shutdown_handler stores function +# --------------------------------------------------------------------------- + +def test_shutdown_handler_stores_function(): + """@server.shutdown_handler stores the function on the server.""" + app = InvocationAgentServerHost() + + @app.shutdown_handler + async def on_shutdown(): + pass + + assert app._shutdown_fn is on_shutdown + + +# --------------------------------------------------------------------------- +# Full request flow +# --------------------------------------------------------------------------- + +def test_full_request_flow(): + """Full lifecycle: invoke → get → cancel → get (not_found).""" + app = InvocationAgentServerHost() + store: dict[str, dict] = {} + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + store[context.invocation_id] = payload + return {"stored": True} + + @app.get_invocation_handler + async def get_handler(context: InvocationContext) -> dict: + if context.invocation_id not in store: + raise InvocationError("not_found", "Not found") + return {"data": store[context.invocation_id]} + + @app.cancel_invocation_handler + async def cancel_handler(context: InvocationContext) -> dict: + if context.invocation_id not in store: + raise InvocationError("not_found", "Not found") + del store[context.invocation_id] + return {"status": "cancelled"} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + # Invoke + ws.send_json({"action": "invoke", 
"payload": {"key": "lifecycle-test"}}) + invoke_resp = ws.receive_json() + assert invoke_resp["type"] == "result" + inv_id = invoke_resp["invocation_id"] + + # Get + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + assert get_resp["type"] == "result" + assert get_resp["payload"]["data"]["key"] == "lifecycle-test" + + # Cancel + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + cancel_resp = ws.receive_json() + assert cancel_resp["type"] == "result" + assert cancel_resp["payload"]["status"] == "cancelled" + + # Get after cancel + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp2 = ws.receive_json() + assert get_resp2["type"] == "error" + assert get_resp2["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Missing optional handlers +# --------------------------------------------------------------------------- + +def test_missing_invoke_handler_returns_error(): + """Invoke without registered handler returns not_implemented error.""" + app = InvocationAgentServerHost() + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_implemented" + + +def test_missing_get_handler_returns_error(): + """get_invocation without registered handler returns not_found error.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +def test_missing_cancel_handler_returns_error(): 
+ """cancel_invocation without registered handler returns not_found error.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Optional handler defaults and overrides +# --------------------------------------------------------------------------- + +def test_optional_handlers_default_none(): + """Get and cancel handlers default to None.""" + app = InvocationAgentServerHost() + assert app._get_invocation_fn is None + assert app._cancel_invocation_fn is None + + +def test_optional_handler_override(): + """Setting an optional handler replaces None.""" + app = InvocationAgentServerHost() + + @app.get_invocation_handler + async def get_handler(context: InvocationContext) -> dict: + return {"ok": True} + + assert app._get_invocation_fn is not None diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py new file mode 100644 index 000000000000..87ae3040c26b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py @@ -0,0 +1,232 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Edge-case tests for InvocationAgentServerHost over WebSocket.""" +import uuid + +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + + +# --------------------------------------------------------------------------- +# Unknown action +# --------------------------------------------------------------------------- + +def test_unknown_action_returns_error(echo_client): + """Sending an unknown action returns an error.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "unknown_action", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "invalid_action" + + +# --------------------------------------------------------------------------- +# Invalid JSON +# --------------------------------------------------------------------------- + +def test_invalid_json_returns_error(echo_client): + """Sending invalid JSON returns an error but connection stays open.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_text("not valid json {{{") + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "invalid_json" + + # Connection still works + ws.send_json({"action": "invoke", "payload": {"key": "after-error"}}) + resp2 = ws.receive_json() + assert resp2["type"] == "result" + + +def test_non_object_json_returns_error(echo_client): + """Sending a JSON array instead of object returns an error.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_text("[1, 2, 3]") + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "invalid_message" + + +# --------------------------------------------------------------------------- +# Invocation ID handling +# 
--------------------------------------------------------------------------- + +def test_invocation_id_auto_generated(echo_client): + """Invocation ID is auto-generated when not provided.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "invocation_id" in resp + uuid.UUID(resp["invocation_id"]) + + +def test_invocation_id_accepted_from_message(echo_client): + """Server accepts invocation ID from message field.""" + custom_id = str(uuid.uuid4()) + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "invocation_id": custom_id, "payload": {}}) + resp = ws.receive_json() + assert resp["invocation_id"] == custom_id + + +def test_invocation_id_generated_when_empty(echo_client): + """When empty invocation ID is sent, server generates one.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "invocation_id": "", "payload": {}}) + resp = ws.receive_json() + inv_id = resp["invocation_id"] + uuid.UUID(inv_id) + + +# --------------------------------------------------------------------------- +# Payload edge cases +# --------------------------------------------------------------------------- + +def test_large_payload(echo_client): + """Large payload (dict with big value) is handled correctly.""" + big_value = "x" * (1024 * 1024) + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"data": big_value}}) + resp = ws.receive_json() + assert resp["type"] == "result" + assert len(resp["payload"]["echo"]["data"]) == 1024 * 1024 + + +def test_unicode_payload(echo_client): + """Unicode payload is preserved.""" + text = "Hello, 世界! 
🌍" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"text": text}}) + resp = ws.receive_json() + assert resp["type"] == "result" + assert resp["payload"]["echo"]["text"] == text + + +# --------------------------------------------------------------------------- +# Streaming edge cases +# --------------------------------------------------------------------------- + +def test_empty_streaming(): + """Empty streaming response (no chunks) sends only stream_end.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext): + return + yield # noqa: E501 — make it a generator + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "stream_end" + + +def test_streaming_has_invocation_id(): + """Streaming messages include invocation_id.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext): + yield {"chunk": "data"} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "invocation_id" in resp + + +# --------------------------------------------------------------------------- +# Invocation lifecycle +# --------------------------------------------------------------------------- + +def test_multiple_gets(async_storage_client): + """Multiple gets for the same invocation return the same result.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "multi-get"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + for _ in range(3): + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + 
assert get_resp["type"] == "result" + assert get_resp["payload"]["data"]["key"] == "multi-get" + + +def test_double_cancel(async_storage_client): + """Cancelling twice: second cancel returns error.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "cancel-twice"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + cancel1 = ws.receive_json() + assert cancel1["type"] == "result" + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + cancel2 = ws.receive_json() + assert cancel2["type"] == "error" + assert cancel2["error"]["code"] == "not_found" + + +def test_invoke_cancel_get(async_storage_client): + """Invoke -> cancel -> get returns not_found error.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "icg"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.receive_json() # consume cancel response + + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + assert get_resp["type"] == "error" + assert get_resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Multiple sequential invocations on same connection +# --------------------------------------------------------------------------- + +def test_multiple_sequential_invocations(echo_client): + """Multiple sequential invocations on the same WebSocket connection.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ids = set() + for i in range(10): + ws.send_json({"action": "invoke", "payload": {"idx": i}}) + resp = ws.receive_json() + assert resp["type"] == "result" + assert resp["payload"]["echo"]["idx"] == i + 
ids.add(resp["invocation_id"]) + assert len(ids) == 10 + + +# --------------------------------------------------------------------------- +# get/cancel without invocation_id +# --------------------------------------------------------------------------- + +def test_get_without_invocation_id(echo_client): + """get_invocation without invocation_id returns error.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "get_invocation"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "invalid_request" + + +def test_cancel_without_invocation_id(echo_client): + """cancel_invocation without invocation_id returns error.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "cancel_invocation"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "invalid_request" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py new file mode 100644 index 000000000000..3a2f707627cd --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py @@ -0,0 +1,142 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for get_invocation and cancel_invocation actions over WebSocket.""" +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, + InvocationError, +) + + +# --------------------------------------------------------------------------- +# GET after invoke +# --------------------------------------------------------------------------- + +def test_get_after_invoke_returns_stored_result(async_storage_client): + """get_invocation after invoke returns the stored result.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "stored-data"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + + assert get_resp["type"] == "result" + assert get_resp["payload"]["data"]["key"] == "stored-data" + + +# --------------------------------------------------------------------------- +# GET unknown ID +# --------------------------------------------------------------------------- + +def test_get_unknown_id_returns_error(async_storage_client): + """get_invocation with unknown ID returns error.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "unknown-id-12345"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Cancel after invoke +# --------------------------------------------------------------------------- + +def test_cancel_after_invoke_returns_cancelled(async_storage_client): + """cancel_invocation after invoke returns cancelled status.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: 
+ ws.send_json({"action": "invoke", "payload": {"key": "cancel-me"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + cancel_resp = ws.receive_json() + + assert cancel_resp["type"] == "result" + assert cancel_resp["payload"]["status"] == "cancelled" + + +# --------------------------------------------------------------------------- +# Cancel unknown ID +# --------------------------------------------------------------------------- + +def test_cancel_unknown_id_returns_error(async_storage_client): + """cancel_invocation with unknown ID returns error.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "unknown-id-12345"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# GET after cancel +# --------------------------------------------------------------------------- + +def test_get_after_cancel_returns_error(async_storage_client): + """get_invocation after cancel returns error (data removed).""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "temp"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.receive_json() # consume cancel response + + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + + assert get_resp["type"] == "error" + assert get_resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# GET error returns internal_error +# --------------------------------------------------------------------------- + +def 
test_get_invocation_error_returns_internal_error(): + """get_invocation handler raising an exception returns internal_error.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + @app.get_invocation_handler + async def get_handler(context: InvocationContext) -> dict: + raise RuntimeError("get failed") + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "internal_error" + + +# --------------------------------------------------------------------------- +# Cancel error returns internal_error +# --------------------------------------------------------------------------- + +def test_cancel_invocation_error_returns_internal_error(): + """cancel_invocation handler raising an exception returns internal_error.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + @app.cancel_invocation_handler + async def cancel_handler(context: InvocationContext) -> dict: + raise RuntimeError("cancel failed") + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "internal_error" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py new file mode 100644 index 000000000000..fd812188f077 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py @@ -0,0 +1,211 @@ +# --------------------------------------------------------- +# 
Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for graceful shutdown with InvocationAgentServerHost.""" +import asyncio +import logging + +import pytest +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +def _make_server_with_shutdown(**kwargs) -> tuple[InvocationAgentServerHost, list]: + """Create InvocationAgentServerHost with a tracked shutdown handler.""" + server = InvocationAgentServerHost(**kwargs) + calls: list[str] = [] + + @server.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + @server.shutdown_handler + async def on_shutdown(): + calls.append("shutdown") + + return server, calls + + +# --------------------------------------------------------------------------- +# Shutdown handler registration +# --------------------------------------------------------------------------- + +def test_shutdown_handler_registered(): + """Shutdown handler is stored on the server.""" + server, _ = _make_server_with_shutdown() + assert server._shutdown_fn is not None + + +def test_shutdown_handler_not_registered(): + """Without @shutdown_handler, _shutdown_fn is None.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + assert app._shutdown_fn is None + + +# --------------------------------------------------------------------------- +# ASGI lifespan helper +# --------------------------------------------------------------------------- + +async def _drive_lifespan(app): + """Drive a full ASGI lifespan startup+shutdown cycle.""" + scope = {"type": "lifespan"} + 
startup_done = asyncio.Event() + shutdown_done = asyncio.Event() + + async def receive(): + if not startup_done.is_set(): + startup_done.set() + return {"type": "lifespan.startup"} + await asyncio.sleep(0) + return {"type": "lifespan.shutdown"} + + async def send(message): + if message["type"] == "lifespan.shutdown.complete": + shutdown_done.set() + + await app(scope, receive, send) + return shutdown_done.is_set() + + +# --------------------------------------------------------------------------- +# Shutdown handler called during lifespan +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_shutdown_handler_called_on_lifespan_exit(): + """Shutdown handler runs when the ASGI lifespan exits.""" + server, calls = _make_server_with_shutdown() + completed = await _drive_lifespan(server) + assert completed + assert "shutdown" in calls + + +# --------------------------------------------------------------------------- +# Shutdown handler timeout +# --------------------------------------------------------------------------- + +@pytest.mark.asyncio +async def test_shutdown_handler_timeout(caplog): + """Shutdown handler that exceeds timeout is warned about.""" + server = InvocationAgentServerHost(graceful_shutdown_timeout=1) + calls: list[str] = [] + + @server.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + @server.shutdown_handler + async def on_shutdown(): + await asyncio.sleep(10) + calls.append("completed") + + with caplog.at_level(logging.WARNING, logger="azure.ai.agentserver"): + await _drive_lifespan(server) + + assert "completed" not in calls + assert any("did not complete" in r.message.lower() or "timeout" in r.message.lower() for r in caplog.records) + + +# --------------------------------------------------------------------------- +# Shutdown handler exception +# --------------------------------------------------------------------------- 
+ +@pytest.mark.asyncio +async def test_shutdown_handler_exception(caplog): + """Shutdown handler that raises is caught and logged.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + @app.shutdown_handler + async def on_shutdown(): + raise RuntimeError("shutdown exploded") + + with caplog.at_level(logging.ERROR, logger="azure.ai.agentserver"): + await _drive_lifespan(app) + + assert any("on_shutdown" in r.message.lower() or "error" in r.message.lower() for r in caplog.records) + + +# --------------------------------------------------------------------------- +# Graceful shutdown timeout config +# --------------------------------------------------------------------------- + +def test_default_graceful_shutdown_timeout(): + """Default graceful shutdown timeout is 30 seconds.""" + app = InvocationAgentServerHost() + assert app._graceful_shutdown_timeout == 30 + + +def test_custom_graceful_shutdown_timeout(): + """Custom graceful_shutdown_timeout is stored.""" + server = InvocationAgentServerHost(graceful_shutdown_timeout=60) + assert server._graceful_shutdown_timeout == 60 + + +def test_zero_graceful_shutdown_timeout(): + """Zero timeout disables the drain period.""" + server = InvocationAgentServerHost(graceful_shutdown_timeout=0) + assert server._graceful_shutdown_timeout == 0 + + +# --------------------------------------------------------------------------- +# Health endpoint accessible during normal operation +# --------------------------------------------------------------------------- + +def test_health_endpoint_during_operation(): + """GET /readiness returns 200 during normal operation.""" + server, _ = _make_server_with_shutdown() + client = TestClient(server) + resp = client.get("/readiness") + assert resp.status_code == 200 + assert resp.json() == {"status": "healthy"} + + +# --------------------------------------------------------------------------- 
+# No shutdown handler is no-op +# --------------------------------------------------------------------------- + +def test_no_shutdown_handler_is_noop(): + """Without a shutdown handler, WebSocket and lifespan work fine.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "result" + + +# --------------------------------------------------------------------------- +# Multiple requests before shutdown +# --------------------------------------------------------------------------- + +def test_multiple_requests_before_shutdown(): + """Multiple requests can be served on the same WebSocket connection.""" + server, _ = _make_server_with_shutdown() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + for i in range(5): + ws.send_json({"action": "invoke", "payload": {"idx": i}}) + resp = ws.receive_json() + assert resp["type"] == "result" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py new file mode 100644 index 000000000000..e1cc091f9f89 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py @@ -0,0 +1,133 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for the invoke action over WebSocket.""" +import uuid + +from azure.ai.agentserver.invocations import InvocationContext + + +# --------------------------------------------------------------------------- +# Echo payload +# --------------------------------------------------------------------------- + +def test_invoke_echo_payload(echo_client): + """Invoke echoes the payload back.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"msg": "hello world"}}) + resp = ws.receive_json() + assert resp["type"] == "result" + assert resp["payload"]["echo"]["msg"] == "hello world" + + +# --------------------------------------------------------------------------- +# IDs +# --------------------------------------------------------------------------- + +def test_invoke_returns_invocation_id(echo_client): + """Response includes a valid UUID invocation_id.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "invocation_id" in resp + uuid.UUID(resp["invocation_id"]) + + +def test_invoke_returns_session_id(echo_client): + """Response includes a valid UUID session_id.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "session_id" in resp + uuid.UUID(resp["session_id"]) + + +def test_invoke_unique_invocation_ids(echo_client): + """Each invoke gets a unique invocation ID.""" + ids = set() + with echo_client.websocket_connect("/invocations/ws") as ws: + for _ in range(5): + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + ids.add(resp["invocation_id"]) + assert len(ids) == 5 + + +def test_invoke_accepts_custom_invocation_id(echo_client): + """If the message includes invocation_id, the server uses it.""" + custom_id = 
str(uuid.uuid4()) + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "invocation_id": custom_id, "payload": {}}) + resp = ws.receive_json() + assert resp["invocation_id"] == custom_id + + +# --------------------------------------------------------------------------- +# Streaming +# --------------------------------------------------------------------------- + +def test_streaming_returns_chunks(streaming_client): + """Streaming handler yields 3 chunks then stream_end.""" + with streaming_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + chunks = [] + while True: + resp = ws.receive_json() + if resp["type"] == "stream_chunk": + chunks.append(resp["payload"]) + elif resp["type"] == "stream_end": + break + assert len(chunks) == 3 + for i, chunk in enumerate(chunks): + assert chunk == {"chunk": i} + + +def test_streaming_has_invocation_id(streaming_client): + """Streaming messages include invocation_id.""" + with streaming_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "invocation_id" in resp + uuid.UUID(resp["invocation_id"]) + + +# --------------------------------------------------------------------------- +# Empty payload +# --------------------------------------------------------------------------- + +def test_invoke_empty_payload(echo_client): + """Empty payload doesn't crash the server.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "result" + + +# --------------------------------------------------------------------------- +# Error handling +# --------------------------------------------------------------------------- + +def test_invoke_error_returns_error(failing_client): + """Handler exception returns error message.""" + with 
failing_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "internal_error" + assert resp["error"]["message"] == "Internal server error" + + +def test_invoke_error_has_invocation_id(failing_client): + """Error response still includes invocation_id.""" + with failing_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "invocation_id" in resp + + +def test_error_hides_details_by_default(failing_client): + """Exception message is hidden in error responses.""" + with failing_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "something went wrong" not in resp["error"]["message"] diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py new file mode 100644 index 000000000000..42aa6c4107e4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py @@ -0,0 +1,178 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
# ---------------------------------------------------------
"""Tests for varied payloads with InvocationAgentServerHost over WebSocket."""
import base64

from starlette.testclient import TestClient

from azure.ai.agentserver.invocations import (
    InvocationAgentServerHost,
    InvocationContext,
)


# ---------------------------------------------------------------------------
# Helper: echo agent with content type tracking
# ---------------------------------------------------------------------------

def _make_content_type_echo_agent() -> InvocationAgentServerHost:
    """Agent that echoes the payload and notes its content_type field."""
    host = InvocationAgentServerHost()

    @host.invoke_handler
    async def handle(payload: dict, context: InvocationContext) -> dict:
        content_type = payload.get("content_type", "unknown")
        return {"echo": payload, "received_content_type": content_type}

    return host


def _make_sse_agent() -> InvocationAgentServerHost:
    """Agent that responds with a short stream of chunks."""
    host = InvocationAgentServerHost()

    @host.invoke_handler
    async def handle(payload: dict, context: InvocationContext):
        for seq in range(3):
            yield {"event": seq}

    return host


# ---------------------------------------------------------------------------
# Various content types (base64-encoded binary data in JSON)
# ---------------------------------------------------------------------------

def test_png_payload():
    """PNG content type payload is accepted and round-trips intact."""
    client = TestClient(_make_content_type_echo_agent())
    fake_png = b"\x89PNG\r\n\x1a\n" + bytes(100)
    encoded = base64.b64encode(fake_png).decode()
    with client.websocket_connect("/invocations/ws") as ws:
        ws.send_json(
            {
                "action": "invoke",
                "payload": {"content_type": "image/png", "data_base64": encoded},
            }
        )
        reply = ws.receive_json()
    assert reply["type"] == "result"
    assert reply["payload"]["received_content_type"] == "image/png"
    # Decode what came back to prove the binary survived the round trip.
    assert base64.b64decode(reply["payload"]["echo"]["data_base64"]) == fake_png


def test_jpeg_payload():
    """JPEG content type payload is accepted."""
    client = TestClient(_make_content_type_echo_agent())
    fake_jpeg = b"\xff\xd8\xff\xe0" + bytes(100)
    with client.websocket_connect("/invocations/ws") as ws:
        ws.send_json(
            {
                "action": "invoke",
                "payload": {
                    "content_type": "image/jpeg",
                    "data_base64": base64.b64encode(fake_jpeg).decode(),
                },
            }
        )
        reply = ws.receive_json()
    assert reply["type"] == "result"
    assert reply["payload"]["received_content_type"] == "image/jpeg"


def test_wav_payload():
    """WAV audio content type payload is accepted."""
    client = TestClient(_make_content_type_echo_agent())
    fake_wav = b"RIFF" + bytes(100)
    with client.websocket_connect("/invocations/ws") as ws:
        ws.send_json(
            {
                "action": "invoke",
                "payload": {
                    "content_type": "audio/wav",
                    "data_base64": base64.b64encode(fake_wav).decode(),
                },
            }
        )
        reply = ws.receive_json()
    assert reply["type"] == "result"
    assert reply["payload"]["received_content_type"] == "audio/wav"


def test_text_plain_payload():
    """text/plain content type payload is accepted and echoed."""
    client = TestClient(_make_content_type_echo_agent())
    with client.websocket_connect("/invocations/ws") as ws:
        ws.send_json(
            {
                "action": "invoke",
                "payload": {"content_type": "text/plain", "data": "Hello, world!"},
            }
        )
        reply = ws.receive_json()
    assert reply["type"] == "result"
    assert reply["payload"]["echo"]["data"] == "Hello, world!"
+ + +# --------------------------------------------------------------------------- +# Query-like parameters in payload +# --------------------------------------------------------------------------- + +def test_params_in_payload(): + """Arbitrary parameters are accessible in the handler payload.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"name": payload.get("name", "unknown")} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"name": "Alice"}}) + resp = ws.receive_json() + assert resp["type"] == "result" + assert resp["payload"]["name"] == "Alice" + + +# --------------------------------------------------------------------------- +# Streaming +# --------------------------------------------------------------------------- + +def test_streaming_chunks(): + """Streaming response sends multiple chunks.""" + server = _make_sse_agent() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + chunks = [] + while True: + resp = ws.receive_json() + if resp["type"] == "stream_chunk": + chunks.append(resp["payload"]) + elif resp["type"] == "stream_end": + break + assert len(chunks) == 3 + for i, chunk in enumerate(chunks): + assert chunk == {"event": i} + + +# --------------------------------------------------------------------------- +# Health endpoint +# --------------------------------------------------------------------------- + +def test_health_endpoint_returns_200(): + """GET /readiness returns 200 with healthy status.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + resp = client.get("/readiness") + assert resp.status_code == 200 + assert resp.json() == {"status": 
"healthy"} diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py new file mode 100644 index 000000000000..cda62064c13f --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py @@ -0,0 +1,44 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for request processing (timeout feature removed per spec alignment).""" +import asyncio + +import pytest +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + + +# --------------------------------------------------------------------------- +# InvocationAgentServerHost no longer accepts request_timeout +# --------------------------------------------------------------------------- + +def test_no_request_timeout_parameter(): + """InvocationAgentServerHost no longer accepts request_timeout.""" + with pytest.raises(TypeError): + InvocationAgentServerHost(request_timeout=10) + + +# --------------------------------------------------------------------------- +# Slow invoke completes without timeout +# --------------------------------------------------------------------------- + +def test_slow_invoke_completes(): + """Without timeout, handler runs to completion.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + await asyncio.sleep(0.1) + return {"status": "done"} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "result" + assert resp["payload"]["status"] == "done" diff --git 
a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py new file mode 100644 index 000000000000..227f89fd74d5 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py @@ -0,0 +1,104 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests for basic server route registration with InvocationAgentServerHost.""" +import uuid + +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + +from conftest import SAMPLE_OPENAPI_SPEC + + +# --------------------------------------------------------------------------- +# WebSocket connection /invocations/ws +# --------------------------------------------------------------------------- + +def test_websocket_invoke_returns_result(echo_client): + """Invoke via WebSocket returns a result.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"test": True}}) + resp = ws.receive_json() + assert resp["type"] == "result" + + +# --------------------------------------------------------------------------- +# Invocation ID is valid UUID +# --------------------------------------------------------------------------- + +def test_invoke_returns_uuid_invocation_id(echo_client): + """Invoke returns a valid UUID invocation ID.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + inv_id = resp["invocation_id"] + parsed = uuid.UUID(inv_id) + assert str(parsed) == inv_id + + +# --------------------------------------------------------------------------- +# GET openapi spec returns 404 when not set +# 
--------------------------------------------------------------------------- + +def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): + """GET /invocations/docs/openapi.json returns 404 when no spec registered.""" + resp = no_spec_client.get("/invocations/docs/openapi.json") + assert resp.status_code == 404 + + +# --------------------------------------------------------------------------- +# GET openapi spec returns spec when registered +# --------------------------------------------------------------------------- + +def test_get_openapi_spec_returns_spec_when_registered(): + """GET /invocations/docs/openapi.json returns the spec when registered.""" + app = InvocationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + resp = client.get("/invocations/docs/openapi.json") + assert resp.status_code == 200 + assert resp.json() == SAMPLE_OPENAPI_SPEC + + +# --------------------------------------------------------------------------- +# get_invocation returns not_found error by default +# --------------------------------------------------------------------------- + +def test_get_invocation_returns_not_found_default(echo_client): + """get_invocation without handler returns not_found error.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# cancel returns not_found error by default +# --------------------------------------------------------------------------- + +def test_cancel_invocation_returns_not_found_default(echo_client): + """cancel_invocation without handler returns not_found error.""" + with 
echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Unknown HTTP route returns 404 +# --------------------------------------------------------------------------- + +def test_unknown_route_returns_404(echo_client): + """Unknown route returns 404.""" + resp = echo_client.get("/nonexistent") + assert resp.status_code == 404 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py new file mode 100644 index 000000000000..f56dba92eadd --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py @@ -0,0 +1,104 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for session ID resolution over WebSocket.""" +import os +import uuid +from unittest.mock import patch + +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + + +# --------------------------------------------------------------------------- +# Invoke response has session_id +# --------------------------------------------------------------------------- + +def test_invoke_has_session_id(echo_client): + """Invoke response includes session_id.""" + with echo_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert "session_id" in resp + uuid.UUID(resp["session_id"]) + + +# --------------------------------------------------------------------------- +# Invoke with session_id in message uses that value +# --------------------------------------------------------------------------- + +def test_invoke_with_session_id_in_message(): + """Invoke with session_id in message uses that value.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({ + "action": "invoke", + "session_id": "my-custom-session", + "payload": {}, + }) + resp = ws.receive_json() + assert resp["session_id"] == "my-custom-session" + + +# --------------------------------------------------------------------------- +# Invoke with env var +# --------------------------------------------------------------------------- + +def test_invoke_uses_env_var(): + """Invoke uses FOUNDRY_AGENT_SESSION_ID env var when no session_id in message.""" + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return 
{"ok": True} + + client = TestClient(app) + with patch.dict(os.environ, {"FOUNDRY_AGENT_SESSION_ID": "env-session"}): + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["session_id"] == "env-session" + + +# --------------------------------------------------------------------------- +# get_invocation does NOT include session_id (not part of get protocol) +# --------------------------------------------------------------------------- + +def test_get_invocation_no_session_id(async_storage_client): + """get_invocation response does not include session_id.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "data"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + assert get_resp["type"] == "result" + assert "session_id" not in get_resp + + +# --------------------------------------------------------------------------- +# cancel_invocation does NOT include session_id +# --------------------------------------------------------------------------- + +def test_cancel_invocation_no_session_id(async_storage_client): + """cancel_invocation response does not include session_id.""" + with async_storage_client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "data"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + cancel_resp = ws.receive_json() + assert cancel_resp["type"] == "result" + assert "session_id" not in cancel_resp diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py new file mode 100644 index 
000000000000..04c9babd60a4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py @@ -0,0 +1,124 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +"""Tests that the invoke_agent span is set as the current span in context, +so that child spans created by framework handlers are correctly parented. +""" +import os +from unittest.mock import patch + +import pytest +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + + +try: + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider as SdkTracerProvider + from opentelemetry.sdk.trace.export import SimpleSpanProcessor + from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter + + _HAS_OTEL = True +except ImportError: + _HAS_OTEL = False + +if _HAS_OTEL: + _EXPORTER = InMemorySpanExporter() + _PROVIDER = SdkTracerProvider() + _PROVIDER.add_span_processor(SimpleSpanProcessor(_EXPORTER)) + trace.set_tracer_provider(_PROVIDER) +else: + _EXPORTER = None + +pytestmark = pytest.mark.skipif(not _HAS_OTEL, reason="opentelemetry not installed") + + +@pytest.fixture(autouse=True) +def _clear(): + if _EXPORTER: + _EXPORTER.clear() + + +def _get_spans(): + return list(_EXPORTER.get_finished_spans()) if _EXPORTER else [] + + +def _make_server_with_child_span(): + """Server whose handler creates a child span (simulating a framework).""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + app = InvocationAgentServerHost() + child_tracer = trace.get_tracer("test.framework") + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + with 
child_tracer.start_as_current_span("framework_invoke_agent") as _span: + return {"ok": True} + + return app + + +def _make_streaming_server_with_child_span(): + """Server with streaming response whose handler creates a child span.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + app = InvocationAgentServerHost() + child_tracer = trace.get_tracer("test.framework") + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext): + with child_tracer.start_as_current_span("framework_invoke_agent"): + yield {"chunk": "data"} + + return app + + +def _assert_child_parented(spans, streaming=False): + """Assert the framework span is a child of the invoke_agent span.""" + parent_spans = [s for s in spans if "invoke_agent" in s.name and s.name != "framework_invoke_agent"] + child_spans = [s for s in spans if s.name == "framework_invoke_agent"] + + assert len(parent_spans) >= 1, f"Expected invoke_agent span, got: {[s.name for s in spans]}" + assert len(child_spans) == 1, f"Expected framework span, got: {[s.name for s in spans]}" + + parent = parent_spans[0] + child = child_spans[0] + + label = "streaming" if streaming else "non-streaming" + assert child.parent is not None, f"Framework span has no parent in {label} case" + assert child.parent.span_id == parent.context.span_id, ( + f"Framework span parent ({format(child.parent.span_id, '016x')}) " + f"!= invoke_agent span ({format(parent.context.span_id, '016x')}). " + f"Spans are siblings, not parent-child ({label})." 
+ ) + + +def test_framework_span_is_child_of_invoke_span(): + """A span created inside the handler should be a child of the + agentserver invoke_agent span, not a sibling.""" + server = _make_server_with_child_span() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "result" + + _assert_child_parented(_get_spans(), streaming=False) + + +def test_framework_span_is_child_streaming(): + """Same parent-child relationship holds for streaming responses.""" + server = _make_streaming_server_with_child_span() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + while True: + resp = ws.receive_json() + if resp["type"] == "stream_end": + break + + _assert_child_parented(_get_spans(), streaming=True) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py new file mode 100644 index 000000000000..85b7555e5ae0 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py @@ -0,0 +1,356 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for OpenTelemetry tracing in the WebSocket invocations protocol.""" +import os +import uuid +from unittest.mock import patch + +import pytest +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, + InvocationError, +) + + +# --------------------------------------------------------------------------- +# Module-level OTel setup with in-memory exporter +# --------------------------------------------------------------------------- + +try: + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider as SdkTracerProvider + from opentelemetry.sdk.trace.export import SimpleSpanProcessor + from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter + + _HAS_OTEL = True +except ImportError: + _HAS_OTEL = False + +if _HAS_OTEL: + _MODULE_EXPORTER = InMemorySpanExporter() + _MODULE_PROVIDER = SdkTracerProvider() + _MODULE_PROVIDER.add_span_processor(SimpleSpanProcessor(_MODULE_EXPORTER)) + # If a provider was already set (e.g. by test_span_parenting), add our + # exporter to the existing provider as well, so we capture spans regardless + # of module import order. 
+ existing = trace.get_tracer_provider() + if isinstance(existing, SdkTracerProvider) and existing is not _MODULE_PROVIDER: + existing.add_span_processor(SimpleSpanProcessor(_MODULE_EXPORTER)) + else: + trace.set_tracer_provider(_MODULE_PROVIDER) +else: + _MODULE_EXPORTER = None + _MODULE_PROVIDER = None + +pytestmark = pytest.mark.skipif(not _HAS_OTEL, reason="opentelemetry not installed") + + +@pytest.fixture(autouse=True) +def _clear_spans(): + """Clear exported spans before each test.""" + if _MODULE_EXPORTER: + _MODULE_EXPORTER.clear() + + +def _get_spans(): + """Return all captured spans.""" + if _MODULE_EXPORTER: + return _MODULE_EXPORTER.get_finished_spans() + return [] + + +# --------------------------------------------------------------------------- +# Helper: create tracing-enabled server +# --------------------------------------------------------------------------- + +def _make_tracing_server(**kwargs): + """Create an InvocationAgentServerHost with tracing enabled.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = InvocationAgentServerHost(**kwargs) + + @server.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"echo": payload} + + return server + + +def _make_tracing_server_with_get_cancel(**kwargs): + """Create a tracing-enabled server with get/cancel handlers.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = InvocationAgentServerHost(**kwargs) + + store: dict[str, dict] = {} + + @server.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + store[context.invocation_id] = payload + return {"stored": True} + + @server.get_invocation_handler + async def get_handler(context: 
InvocationContext) -> dict: + if context.invocation_id in store: + return {"data": store[context.invocation_id]} + raise InvocationError("not_found", "Not found") + + @server.cancel_invocation_handler + async def cancel_handler(context: InvocationContext) -> dict: + if context.invocation_id in store: + del store[context.invocation_id] + return {"status": "cancelled"} + raise InvocationError("not_found", "Not found") + + return server + + +def _make_failing_tracing_server(**kwargs): + """Create a tracing-enabled server whose handler raises.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = InvocationAgentServerHost(**kwargs) + + @server.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + raise ValueError("tracing error test") + + return server + + +def _make_streaming_tracing_server(**kwargs): + """Create a tracing-enabled server with streaming response.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + server = InvocationAgentServerHost(**kwargs) + + @server.invoke_handler + async def handle(payload: dict, context: InvocationContext): + yield {"chunk": 1} + yield {"chunk": 2} + + return server + + +# --------------------------------------------------------------------------- +# Tracing disabled by default +# --------------------------------------------------------------------------- + +def test_tracing_disabled_by_default(): + """No spans are created when tracing is not enabled.""" + if _MODULE_EXPORTER: + _MODULE_EXPORTER.clear() + + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with 
client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + ws.receive_json() + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) == 0 + + +# --------------------------------------------------------------------------- +# Tracing enabled creates invoke span +# --------------------------------------------------------------------------- + +def test_tracing_enabled_creates_invoke_span(): + """Tracing enabled creates a span named 'invoke_agent'.""" + server = _make_tracing_server() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + ws.receive_json() + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + assert invoke_spans[0].name.startswith("invoke_agent") + + +# --------------------------------------------------------------------------- +# Invoke error records exception +# --------------------------------------------------------------------------- + +def test_invoke_error_records_exception(): + """When handler raises, the span records the exception.""" + server = _make_failing_tracing_server() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "error" + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + span = invoke_spans[0] + assert span.status.status_code.name == "ERROR" + + +# --------------------------------------------------------------------------- +# GET/cancel create spans +# --------------------------------------------------------------------------- + +def test_get_invocation_creates_span(): + """get_invocation creates a span.""" + server = _make_tracing_server_with_get_cancel() + client 
= TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "data"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.receive_json() + + spans = _get_spans() + get_spans = [s for s in spans if "get_invocation" in s.name] + assert len(get_spans) >= 1 + + +def test_cancel_invocation_creates_span(): + """cancel_invocation creates a span.""" + server = _make_tracing_server_with_get_cancel() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {"key": "data"}}) + invoke_resp = ws.receive_json() + inv_id = invoke_resp["invocation_id"] + + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.receive_json() + + spans = _get_spans() + cancel_spans = [s for s in spans if "cancel_invocation" in s.name] + assert len(cancel_spans) >= 1 + + +# --------------------------------------------------------------------------- +# Tracing via env var +# --------------------------------------------------------------------------- + +def test_tracing_via_appinsights_env_var(): + """Tracing is enabled when APPLICATIONINSIGHTS_CONNECTION_STRING is set.""" + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): + with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + ws.receive_json() + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + + +# 
--------------------------------------------------------------------------- +# No tracing when no endpoints configured +# --------------------------------------------------------------------------- + +def test_no_tracing_when_no_endpoints(): + """Tracing is disabled when no connection string or OTLP endpoint is set.""" + env = os.environ.copy() + env.pop("APPLICATIONINSIGHTS_CONNECTION_STRING", None) + env.pop("OTEL_EXPORTER_OTLP_ENDPOINT", None) + with patch.dict(os.environ, env, clear=True): + app = InvocationAgentServerHost() + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"ok": True} + + if _MODULE_EXPORTER: + _MODULE_EXPORTER.clear() + + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + ws.receive_json() + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) == 0 + + +# --------------------------------------------------------------------------- +# Streaming spans +# --------------------------------------------------------------------------- + +def test_streaming_creates_span(): + """Streaming response creates and completes a span.""" + server = _make_streaming_tracing_server() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + # Consume all streaming messages + while True: + resp = ws.receive_json() + if resp["type"] == "stream_end": + break + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + + +# --------------------------------------------------------------------------- +# GenAI attributes on invoke span +# --------------------------------------------------------------------------- + +def test_genai_attributes_on_invoke_span(): + """Invoke span has GenAI semantic convention attributes.""" + server 
= _make_tracing_server() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + ws.receive_json() + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + attrs = dict(invoke_spans[0].attributes) + + assert attrs.get("gen_ai.provider.name") == "AzureAI Hosted Agents" + assert attrs.get("gen_ai.system") == "azure.ai.agentserver" + assert attrs.get("service.name") == "azure.ai.agentserver" + + +# --------------------------------------------------------------------------- +# Session ID in gen_ai.conversation.id +# --------------------------------------------------------------------------- + +def test_session_id_in_conversation_id(): + """Session ID is set as gen_ai.conversation.id on invoke span.""" + server = _make_tracing_server() + client = TestClient(server) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({ + "action": "invoke", + "session_id": "test-session", + "payload": {}, + }) + ws.receive_json() + + spans = _get_spans() + invoke_spans = [s for s in spans if "invoke_agent" in s.name] + assert len(invoke_spans) >= 1 + attrs = dict(invoke_spans[0].attributes) + assert attrs.get("gen_ai.conversation.id") == "test-session" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py new file mode 100644 index 000000000000..0cffdc2b1b55 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py @@ -0,0 +1,89 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for WebSocket ping/pong keep-alive.""" +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationContext, +) + + +def _make_echo_app(**kwargs): + app = InvocationAgentServerHost(**kwargs) + + @app.invoke_handler + async def handle(payload: dict, context: InvocationContext) -> dict: + return {"echo": payload} + + return app + + +# --------------------------------------------------------------------------- +# Client-initiated ping → server responds with pong +# --------------------------------------------------------------------------- + +def test_client_ping_gets_pong(): + """Server replies with pong when client sends a ping action.""" + client = TestClient(_make_echo_app()) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "ping"}) + resp = ws.receive_json() + assert resp["type"] == "pong" + + +def test_client_ping_does_not_interrupt_invoke(): + """A ping/pong exchange between invocations doesn't break the connection.""" + client = TestClient(_make_echo_app()) + with client.websocket_connect("/invocations/ws") as ws: + # Normal invoke + ws.send_json({"action": "invoke", "payload": {"n": 1}}) + r1 = ws.receive_json() + assert r1["type"] == "result" + + # Ping/pong + ws.send_json({"action": "ping"}) + pong = ws.receive_json() + assert pong["type"] == "pong" + + # Another invoke still works + ws.send_json({"action": "invoke", "payload": {"n": 2}}) + r2 = ws.receive_json() + assert r2["type"] == "result" + assert r2["payload"]["echo"]["n"] == 2 + + +def test_client_pong_is_accepted_silently(): + """Server accepts pong action without returning an error.""" + client = TestClient(_make_echo_app()) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "pong"}) + # No response expected for pong — verify next invoke still works. 
+ ws.send_json({"action": "invoke", "payload": {"ok": True}}) + resp = ws.receive_json() + assert resp["type"] == "result" + + +# --------------------------------------------------------------------------- +# ws_ping_interval=0 disables server-side pings +# --------------------------------------------------------------------------- + +def test_ping_disabled_with_zero_interval(): + """Setting ws_ping_interval=0 disables the background ping task.""" + app = _make_echo_app(ws_ping_interval=0) + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "result" + + +def test_custom_ping_interval(): + """A custom ws_ping_interval is accepted without error.""" + app = _make_echo_app(ws_ping_interval=15) + client = TestClient(app) + with client.websocket_connect("/invocations/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "result" From 9ec4bee95c5eb8989c42643fddf63f9f545b6086 Mon Sep 17 00:00:00 2001 From: Xinran Date: Mon, 20 Apr 2026 09:26:20 +0000 Subject: [PATCH 02/10] Refactor tests to use ConversationAgentServerHost and ConversationContext - Updated all test files to replace InvocationAgentServerHost with ConversationAgentServerHost. - Changed references from InvocationContext to ConversationContext. - Adjusted WebSocket endpoint paths from /invocations/ws to /conversations/ws. - Modified test assertions to check for conversation_id instead of invocation_id. - Ensured that all relevant tests for graceful shutdown, invoke actions, request limits, and tracing are consistent with the new conversation model. 
--- .../CHANGELOG.md | 12 +- .../MANIFEST.in | 2 +- .../azure-ai-agentserver-websocket/README.md | 136 ++++---- .../__init__.py | 12 +- .../_constants.py | 18 +- .../_conversation.py} | 324 +++++++++--------- .../_version.py | 0 .../{invocations => conversations}/py.typed | 0 .../cspell.json | 2 +- .../pyproject.toml | 12 +- .../{serve_browser_client.py => client.py} | 4 +- .../samples/browser_client/index.html | 158 ++++++++- .../samples/streaming_echo_agent/README.md | 46 ++- .../streaming_echo_agent/agent.manifest.yaml | 10 +- .../samples/streaming_echo_agent/agent.yaml | 2 +- .../samples/streaming_echo_agent/main.py | 105 ++++-- .../streaming_echo_agent/requirements.txt | 2 +- .../serve_browser_client.py | 31 -- .../streaming_invoke_agent/requirements.txt | 2 - .../streaming_invoke_agent.py | 69 ---- .../tests/conftest.py | 82 ++--- .../tests/test_decorator_pattern.py | 124 +++---- .../tests/test_edge_cases.py | 122 +++---- .../tests/test_get_cancel.py | 80 ++--- .../tests/test_graceful_shutdown.py | 42 +-- .../tests/test_invoke.py | 60 ++-- .../tests/test_multimodal_protocol.py | 40 +-- .../tests/test_request_limits.py | 18 +- .../tests/test_server_routes.py | 52 +-- .../tests/test_session_id.py | 44 +-- .../tests/test_span_parenting.py | 18 +- .../tests/test_tracing.py | 102 +++--- .../tests/test_ws_keepalive.py | 22 +- 33 files changed, 931 insertions(+), 822 deletions(-) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{invocations => conversations}/__init__.py (55%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{invocations => conversations}/_constants.py (54%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{invocations/_invocation.py => conversations/_conversation.py} (63%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{invocations => conversations}/_version.py (100%) rename 
sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{invocations => conversations}/py.typed (100%) rename sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/{serve_browser_client.py => client.py} (90%) delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md index 1cb00d1154d0..a520e75a9d7d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md @@ -4,12 +4,12 @@ ### Features Added -- Initial release of `azure-ai-agentserver-invocations`. -- `InvocationHandler` for wiring invocation protocol endpoints to an `AgentHost`. -- Decorator-based handler registration (`@invocations.invoke_handler`). -- Optional `GET /invocations/{id}` and `POST /invocations/{id}/cancel` endpoints. -- `GET /invocations/docs/openapi.json` for OpenAPI spec serving. -- Invocation ID tracking and session correlation via `agent_session_id` query parameter. +- Initial release of `azure-ai-agentserver-conversations`. +- `ConversationHandler` for wiring conversation protocol endpoints to an `AgentHost`. +- Decorator-based handler registration (`@conversations.invoke_handler`). +- Optional `GET /conversations/{id}` and `POST /conversations/{id}/cancel` endpoints. +- `GET /conversations/docs/openapi.json` for OpenAPI spec serving. +- Conversation ID tracking and session correlation via `agent_session_id` query parameter. - Distributed tracing with GenAI semantic convention span attributes. - W3C Baggage propagation for cross-service correlation. 
- Streaming response support with span lifecycle management. diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in index cd83a6c13bfa..dd7cd63de8c3 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in +++ b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in @@ -5,4 +5,4 @@ recursive-include samples *.py *.md include azure/__init__.py include azure/ai/__init__.py include azure/ai/agentserver/__init__.py -include azure/ai/agentserver/invocations/py.typed +include azure/ai/agentserver/conversations/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/README.md index 9d296974fa7c..049ea93b92a2 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/README.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/README.md @@ -1,13 +1,13 @@ -# Azure AI AgentServerHost Invocations for Python (WebSocket) +# Azure AI AgentServerHost Conversations for Python (WebSocket) -The `azure-ai-agentserver-invocations` package provides the invocation protocol over **WebSocket long connections** for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and exposes a single WebSocket endpoint at `/invocations/ws` that supports invoke, get, cancel, and streaming operations. +The `azure-ai-agentserver-conversations` package provides the conversation protocol over **WebSocket long connections** for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and exposes a single WebSocket endpoint at `/conversations/ws` that supports invoke, get, cancel, and streaming operations. 
## Getting started ### Install the package ```bash -pip install azure-ai-agentserver-invocations +pip install azure-ai-agentserver-conversations ``` This automatically installs `azure-ai-agentserver-core` as a dependency. @@ -18,33 +18,33 @@ This automatically installs `azure-ai-agentserver-core` as a dependency. ## Key concepts -### InvocationAgentServerHost +### ConversationAgentServerHost -`InvocationAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the invocation protocol. It provides decorator methods for registering handler functions: +`ConversationAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the conversation protocol. It provides decorator methods for registering handler functions: - `@app.invoke_handler` — **Required.** Handles `invoke` actions. Supports both async functions (non-streaming) and async generators (streaming). -- `@app.get_invocation_handler` — Optional. Handles `get_invocation` actions. -- `@app.cancel_invocation_handler` — Optional. Handles `cancel_invocation` actions. +- `@app.get_conversation_handler` — Optional. Handles `get_conversation` actions. +- `@app.cancel_conversation_handler` — Optional. Handles `cancel_conversation` actions. -### InvocationContext +### ConversationContext -Handler functions receive an `InvocationContext` object containing: +Handler functions receive a `ConversationContext` object containing: -- `context.invocation_id` — The invocation ID (echoed from client or auto-generated UUID). +- `context.conversation_id` — The conversation ID (echoed from client or auto-generated UUID). - `context.session_id` — The resolved session ID. -### InvocationError +### ConversationError -Handlers can raise `InvocationError(code, message)` to return a domain-specific error to the client without exposing internal details. +Handlers can raise `ConversationError(code, message)` to return a domain-specific error to the client without exposing internal details. 
### WebSocket endpoint -All invocation operations use a single persistent WebSocket connection: +All conversation operations use a single persistent WebSocket connection: | Route | Description | |---|---| -| `ws://host:port/invocations/ws` | WebSocket endpoint for all invocation operations | -| `GET /invocations/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | +| `ws://host:port/conversations/ws` | WebSocket endpoint for all conversation operations | +| `GET /conversations/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | | `GET /readiness` | Health check (HTTP) | ### Client → Server messages @@ -52,9 +52,9 @@ All invocation operations use a single persistent WebSocket connection: All messages are JSON text frames with an `action` field: ```json -{"action": "invoke", "payload": {...}, "invocation_id": "optional", "session_id": "optional"} -{"action": "get_invocation", "invocation_id": "required"} -{"action": "cancel_invocation", "invocation_id": "required"} +{"action": "invoke", "payload": {...}, "conversation_id": "optional", "session_id": "optional"} +{"action": "get_conversation", "conversation_id": "required"} +{"action": "cancel_conversation", "conversation_id": "required"} {"action": "ping"} {"action": "pong"} ``` @@ -62,10 +62,10 @@ All messages are JSON text frames with an `action` field: ### Server → Client messages ```json -{"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} -{"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} -{"type": "stream_end", "invocation_id": "...", "session_id": "..."} -{"type": "error", "invocation_id": "...", "error": {"code": "...", "message": "..."}} +{"type": "result", "conversation_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_chunk", "conversation_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_end", "conversation_id": "...", "session_id": "..."} +{"type": "error", "conversation_id": 
"...", "error": {"code": "...", "message": "..."}} {"type": "ping"} {"type": "pong"} ``` @@ -75,18 +75,18 @@ All messages are JSON text frames with an `action` field: Azure APIM and Azure Load Balancer silently drop idle WebSocket connections after approximately 4 minutes, even though the backend supports 60-minute connections. To prevent this, the server sends periodic `{"type": "ping"}` messages to each connected client. - **Default interval**: 30 seconds (well within the ~4-minute idle timeout). -- **Disable**: Pass `ws_ping_interval=0` to `InvocationAgentServerHost()`. +- **Disable**: Pass `ws_ping_interval=0` to `ConversationAgentServerHost()`. - **Custom interval**: Pass any positive integer, e.g. `ws_ping_interval=15`. Clients should respond with `{"action": "pong"}` when they receive a `{"type": "ping"}` message. Clients may also send `{"action": "ping"}` at any time; the server replies with `{"type": "pong"}`. ```python -app = InvocationAgentServerHost(ws_ping_interval=20) # ping every 20 seconds +app = ConversationAgentServerHost(ws_ping_interval=20) # ping every 20 seconds ``` ### Session ID resolution -Session IDs group related invocations into a conversation. The SDK resolves the session ID in order: +Session IDs group related conversations into a session. The SDK resolves the session ID in order: 1. `session_id` field in the WebSocket message 2. `FOUNDRY_AGENT_SESSION_ID` environment variable @@ -94,24 +94,24 @@ Session IDs group related invocations into a conversation. 
The SDK resolves the ### Distributed tracing -When tracing is enabled on the `AgentServerHost`, invocation spans are automatically created with GenAI semantic conventions: +When tracing is enabled on the `AgentServerHost`, conversation spans are automatically created with GenAI semantic conventions: - **Span name**: `invoke_agent {FOUNDRY_AGENT_NAME}:{FOUNDRY_AGENT_VERSION}` - **Span attributes**: `gen_ai.system`, `gen_ai.operation.name`, `gen_ai.response.id`, `gen_ai.conversation.id`, `gen_ai.agent.id`, `gen_ai.agent.name`, `gen_ai.agent.version` -- **Error tags**: `azure.ai.agentserver.invocations.error.code`, `.error.message` +- **Error tags**: `azure.ai.agentserver.conversations.error.code`, `.error.message` ## Examples ### Simple agent ```python -from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext +from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext -app = InvocationAgentServerHost() +app = ConversationAgentServerHost() @app.invoke_handler -async def handle(payload: dict, context: InvocationContext) -> dict: +async def handle(payload: dict, context: ConversationContext) -> dict: return {"greeting": f"Hello, {payload['name']}!"} app.run() @@ -123,7 +123,7 @@ app.run() import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"name": "Alice"} @@ -144,41 +144,41 @@ asyncio.run(main()) ```python import asyncio -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, - InvocationError, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, + ConversationError, ) _tasks: dict[str, asyncio.Task] = {} _results: dict[str, dict] = {} -app = InvocationAgentServerHost() +app = 
ConversationAgentServerHost() @app.invoke_handler -async def handle(payload: dict, context: InvocationContext) -> dict: - task = asyncio.create_task(do_work(context.invocation_id, payload)) - _tasks[context.invocation_id] = task - return {"invocation_id": context.invocation_id, "status": "running"} - - -@app.get_invocation_handler -async def get_invocation(context: InvocationContext) -> dict: - if context.invocation_id in _results: - return _results[context.invocation_id] - if context.invocation_id in _tasks: - return {"invocation_id": context.invocation_id, "status": "running"} - raise InvocationError("not_found", "Invocation not found") - - -@app.cancel_invocation_handler -async def cancel_invocation(context: InvocationContext) -> dict: - if context.invocation_id in _tasks: - _tasks[context.invocation_id].cancel() - del _tasks[context.invocation_id] - return {"invocation_id": context.invocation_id, "status": "cancelled"} - raise InvocationError("not_found", "Invocation not found") +async def handle(payload: dict, context: ConversationContext) -> dict: + task = asyncio.create_task(do_work(context.conversation_id, payload)) + _tasks[context.conversation_id] = task + return {"conversation_id": context.conversation_id, "status": "running"} + + +@app.get_conversation_handler +async def get_conversation(context: ConversationContext) -> dict: + if context.conversation_id in _results: + return _results[context.conversation_id] + if context.conversation_id in _tasks: + return {"conversation_id": context.conversation_id, "status": "running"} + raise ConversationError("not_found", "Conversation not found") + + +@app.cancel_conversation_handler +async def cancel_conversation(context: ConversationContext) -> dict: + if context.conversation_id in _tasks: + _tasks[context.conversation_id].cancel() + del _tasks[context.conversation_id] + return {"conversation_id": context.conversation_id, "status": "cancelled"} + raise ConversationError("not_found", "Conversation not found") ``` 
### Streaming @@ -186,13 +186,13 @@ async def cancel_invocation(context: InvocationContext) -> dict: Use an async generator to stream chunks back to the client. Each yielded dict is sent as a `stream_chunk` message, followed by a `stream_end` when the generator completes. ```python -from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext +from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext -app = InvocationAgentServerHost() +app = ConversationAgentServerHost() @app.invoke_handler -async def handle(payload: dict, context: InvocationContext): +async def handle(payload: dict, context: ConversationContext): for word in ["Hello", " ", "world", "!"]: yield {"delta": word} ``` @@ -203,7 +203,7 @@ async def handle(payload: dict, context: InvocationContext): import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: await ws.send(json.dumps({"action": "invoke", "payload": {}})) while True: msg = json.loads(await ws.recv()) @@ -220,13 +220,13 @@ asyncio.run(main()) ### Multi-turn conversation -Use the `session_id` field to group invocations into a conversation over the same WebSocket connection: +Use the `session_id` field to group conversations into a session over the same WebSocket connection: ```python import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: # First turn await ws.send(json.dumps({ "action": "invoke", @@ -248,10 +248,10 @@ asyncio.run(main()) ### Serving an OpenAPI spec -Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /invocations/docs/openapi.json`: +Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /conversations/docs/openapi.json`: ```python -app = 
InvocationAgentServerHost(openapi_spec={ +app = ConversationAgentServerHost(openapi_spec={ "openapi": "3.0.3", "info": {"title": "My Agent", "version": "1.0.0"}, "paths": { ... }, @@ -266,11 +266,11 @@ To report an issue with the client library, or request additional features, plea ## Next steps -Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples) folder for complete working examples: +Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-conversations/samples) folder for complete working examples: | Sample | Description | |---|---| -| [streaming_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_invoke_agent/) | Streaming code-generation tokens via WebSocket | +| [streaming_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-conversations/samples/streaming_invoke_agent/) | Streaming code-generation tokens via WebSocket | ## Contributing diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/__init__.py similarity index 55% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/__init__.py index da6218812a4b..60cb31771912 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/__init__.py @@ -1,17 +1,17 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Invocations protocol for Azure AI Hosted Agents. +"""Conversations protocol for Azure AI Hosted Agents. -This package provides an invocation protocol host as a subclass of +This package provides a conversation protocol host as a subclass of :class:`~azure.ai.agentserver.core.AgentServerHost`. Quick start:: - from azure.ai.agentserver.invocations import InvocationAgentServerHost + from azure.ai.agentserver.conversations import ConversationAgentServerHost from starlette.responses import JSONResponse - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler async def handle(request): @@ -21,8 +21,8 @@ async def handle(request): """ __path__ = __import__("pkgutil").extend_path(__path__, __name__) -from ._invocation import InvocationAgentServerHost, InvocationContext, InvocationError +from ._conversation import ConversationAgentServerHost, ConversationContext, ConversationError from ._version import VERSION -__all__ = ["InvocationAgentServerHost", "InvocationContext", "InvocationError"] +__all__ = ["ConversationAgentServerHost", "ConversationContext", "ConversationError"] __version__ = VERSION diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_constants.py similarity index 54% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_constants.py index 5c7fd0804ec4..1e25f644417d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_constants.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_constants.py @@ -3,10 +3,10 @@ -class InvocationConstants: - 
"""Invocation protocol constants. +class ConversationConstants: + """Conversation protocol constants. - Protocol-specific constants for the WebSocket invocation protocol. + Protocol-specific constants for the WebSocket conversation protocol. """ # WebSocket message types (server → client) @@ -19,8 +19,8 @@ class InvocationConstants: # WebSocket actions (client → server) ACTION_INVOKE = "invoke" - ACTION_GET_INVOCATION = "get_invocation" - ACTION_CANCEL_INVOCATION = "cancel_invocation" + ACTION_GET_CONVERSATION = "get_conversation" + ACTION_CANCEL_CONVERSATION = "cancel_conversation" ACTION_PING = "ping" ACTION_PONG = "pong" @@ -28,7 +28,7 @@ class InvocationConstants: DEFAULT_WS_PING_INTERVAL = 30 # seconds # Span attribute keys - ATTR_SPAN_INVOCATION_ID = "azure.ai.agentserver.invocations.invocation_id" - ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.invocations.session_id" - ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.invocations.error.code" - ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.invocations.error.message" + ATTR_SPAN_CONVERSATION_ID = "azure.ai.agentserver.conversations.conversation_id" + ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.conversations.session_id" + ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.conversations.error.code" + ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.conversations.error.message" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_conversation.py similarity index 63% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_conversation.py index f2bb00137ce9..e6300c9d3740 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_invocation.py +++ 
b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_conversation.py @@ -1,15 +1,16 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Invocation protocol host for Azure AI Hosted Agents (WebSocket). +"""Conversation protocol host for Azure AI Hosted Agents (WebSocket). -Provides the invocation protocol over WebSocket long connections +Provides the conversation protocol over WebSocket long connections as a :class:`~azure.ai.agentserver.core.AgentServerHost` subclass. """ import asyncio import contextlib import inspect import json +import logging import os import re import uuid @@ -24,14 +25,14 @@ from azure.ai.agentserver.core import ( # pylint: disable=no-name-in-module AgentServerHost, - get_logger, - Constants, create_error_response, + end_span, + record_error, ) -from ._constants import InvocationConstants +from ._constants import ConversationConstants -logger = get_logger() +logger = logging.getLogger("azure.ai.agentserver") # Maximum length and allowed characters for user-provided IDs (defense in depth). _MAX_ID_LENGTH = 256 @@ -58,23 +59,23 @@ def _sanitize_id(value: str, fallback: str) -> str: @dataclass -class InvocationContext: - """Contextual information for an invocation request. +class ConversationContext: + """Contextual information for a conversation request. Passed to handler functions registered via :meth:`invoke_handler`, - :meth:`get_invocation_handler`, and :meth:`cancel_invocation_handler`. + :meth:`get_conversation_handler`, and :meth:`cancel_conversation_handler`. - :param invocation_id: Unique identifier for this invocation. - :type invocation_id: str - :param session_id: Session identifier for this invocation. + :param conversation_id: Unique identifier for this conversation. + :type conversation_id: str + :param session_id: Session identifier for this conversation. 
:type session_id: str """ - invocation_id: str + conversation_id: str session_id: str -class InvocationError(Exception): +class ConversationError(Exception): """Raised by handlers to signal a domain-specific error. :param code: Machine-readable error code. @@ -89,33 +90,33 @@ def __init__(self, code: str, message: str) -> None: super().__init__(message) -class InvocationAgentServerHost(AgentServerHost): - """Invocation protocol host for Azure AI Hosted Agents over WebSocket. +class ConversationAgentServerHost(AgentServerHost): + """Conversation protocol host for Azure AI Hosted Agents over WebSocket. A :class:`~azure.ai.agentserver.core.AgentServerHost` subclass that adds - a WebSocket endpoint for the invocation protocol. Use the decorator + a WebSocket endpoint for the conversation protocol. Use the decorator methods to wire handler functions to messages. - WebSocket endpoint: ``/invocations/ws`` + WebSocket endpoint: ``/conversations/ws`` **Client → Server messages** (JSON text frames):: - {"action": "invoke", "invocation_id": "opt", "session_id": "opt", "payload": {...}} - {"action": "get_invocation", "invocation_id": "required"} - {"action": "cancel_invocation", "invocation_id": "required"} + {"action": "invoke", "conversation_id": "opt", "session_id": "opt", "payload": {...}} + {"action": "get_conversation", "conversation_id": "required"} + {"action": "cancel_conversation", "conversation_id": "required"} **Server → Client messages** (JSON text frames):: - {"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} - {"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} - {"type": "stream_end", "invocation_id": "...", "session_id": "..."} - {"type": "error", "invocation_id": "...", "error": {"code": "...", "message": "..."}} + {"type": "result", "conversation_id": "...", "session_id": "...", "payload": {...}} + {"type": "stream_chunk", "conversation_id": "...", "session_id": "...", "payload": {...}} + 
{"type": "stream_end", "conversation_id": "...", "session_id": "..."} + {"type": "error", "conversation_id": "...", "error": {"code": "...", "message": "..."}} Usage:: - from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext + from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler async def handle(payload, context): @@ -124,7 +125,7 @@ async def handle(payload, context): app.run() :param openapi_spec: Optional OpenAPI spec dict. When provided, the spec - is served at ``GET /invocations/docs/openapi.json``. + is served at ``GET /conversations/docs/openapi.json``. :type openapi_spec: Optional[dict[str, Any]] :param ws_ping_interval: Interval in seconds between keep-alive ping frames sent to each connected WebSocket client. Keeps the @@ -142,33 +143,33 @@ def __init__( **kwargs: Any, ) -> None: self._invoke_fn: Optional[Callable] = None - self._get_invocation_fn: Optional[Callable] = None - self._cancel_invocation_fn: Optional[Callable] = None + self._get_conversation_fn: Optional[Callable] = None + self._cancel_conversation_fn: Optional[Callable] = None self._openapi_spec = openapi_spec self._ws_ping_interval: int = ( ws_ping_interval if ws_ping_interval is not None - else InvocationConstants.DEFAULT_WS_PING_INTERVAL + else ConversationConstants.DEFAULT_WS_PING_INTERVAL ) - # Build invocation routes - invocation_routes: list[Any] = [ + # Build conversation routes + conversation_routes: list[Any] = [ Route( - "/invocations/docs/openapi.json", + "/conversations/docs/openapi.json", self._get_openapi_spec_endpoint, methods=["GET"], name="get_openapi_spec", ), WebSocketRoute( - "/invocations/ws", + "/conversations/ws", self._websocket_endpoint, - name="invocations_ws", + name="conversations_ws", ), ] # Merge with any routes from sibling mixins via cooperative init existing = list(kwargs.pop("routes", 
None) or []) - super().__init__(routes=existing + invocation_routes, **kwargs) + super().__init__(routes=existing + conversation_routes, **kwargs) # ------------------------------------------------------------------ # Handler decorators @@ -179,7 +180,7 @@ def invoke_handler( ) -> Callable[..., Any]: """Register a function as the invoke handler. - The handler receives ``(payload: dict, context: InvocationContext)`` + The handler receives ``(payload: dict, context: ConversationContext)`` and may be: - An async function returning a ``dict`` (non-streaming). @@ -211,12 +212,12 @@ async def handle(payload, context): self._invoke_fn = fn return fn - def get_invocation_handler( + def get_conversation_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the get-invocation handler. + """Register a function as the get-conversation handler. - The handler receives ``(context: InvocationContext)`` and returns + The handler receives ``(context: ConversationContext)`` and returns a ``dict``. :param fn: Async function. @@ -227,18 +228,18 @@ def get_invocation_handler( """ if not inspect.iscoroutinefunction(fn): raise TypeError( - f"get_invocation_handler expects an async function, got {type(fn).__name__}. " + f"get_conversation_handler expects an async function, got {type(fn).__name__}. " "Use 'async def' to define your handler." ) - self._get_invocation_fn = fn + self._get_conversation_fn = fn return fn - def cancel_invocation_handler( + def cancel_conversation_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the cancel-invocation handler. + """Register a function as the cancel-conversation handler. - The handler receives ``(context: InvocationContext)`` and returns + The handler receives ``(context: ConversationContext)`` and returns a ``dict``. :param fn: Async function. 
@@ -249,10 +250,10 @@ def cancel_invocation_handler( """ if not inspect.iscoroutinefunction(fn): raise TypeError( - f"cancel_invocation_handler expects an async function, got {type(fn).__name__}. " + f"cancel_conversation_handler expects an async function, got {type(fn).__name__}. " "Use 'async def' to define your handler." ) - self._cancel_invocation_fn = fn + self._cancel_conversation_fn = fn return fn # ------------------------------------------------------------------ @@ -290,7 +291,7 @@ def _safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: def _request_span( self, headers: Any, - invocation_id: str, + conversation_id: str, span_operation: str, operation_name: Optional[str] = None, session_id: str = "", @@ -299,8 +300,8 @@ def _request_span( :param headers: HTTP/WebSocket handshake headers. :type headers: any - :param invocation_id: The request/invocation ID. - :type invocation_id: str + :param conversation_id: The request/conversation ID. + :type conversation_id: str :param span_operation: Span operation name. :type span_operation: str :param operation_name: Optional ``gen_ai.operation.name`` value. @@ -310,18 +311,16 @@ def _request_span( :return: Context manager yielding the OTel span or *None*. :rtype: any """ - if self._tracing is not None: - return self._tracing.request_span( - headers, invocation_id, span_operation, - operation_name=operation_name, session_id=session_id, - end_on_exit=False, - ) - return contextlib.nullcontext(None) + return self.request_span( + headers, conversation_id, span_operation, + operation_name=operation_name, session_id=session_id, + end_on_exit=False, + ) def _simple_request_span( self, headers: Any, - invocation_id: str, + conversation_id: str, span_operation: str, session_id: str = "", ) -> Any: @@ -331,8 +330,8 @@ def _simple_request_span( :param headers: HTTP/WebSocket handshake headers. :type headers: any - :param invocation_id: The request/invocation ID. 
- :type invocation_id: str + :param conversation_id: The request/conversation ID. + :type conversation_id: str :param span_operation: Span operation name. :type span_operation: str :param session_id: Session ID (empty string if absent). @@ -340,12 +339,10 @@ def _simple_request_span( :return: Context manager yielding the OTel span or *None*. :rtype: any """ - if self._tracing is not None: - return self._tracing.request_span( - headers, invocation_id, span_operation, - session_id=session_id, - ) - return contextlib.nullcontext(None) + return self.request_span( + headers, conversation_id, span_operation, + session_id=session_id, + ) # ------------------------------------------------------------------ # WebSocket endpoint @@ -365,13 +362,13 @@ async def _ws_ping_loop(self, websocket: WebSocket) -> None: try: while True: await asyncio.sleep(self._ws_ping_interval) - await websocket.send_json({"type": InvocationConstants.MSG_TYPE_PING}) + await websocket.send_json({"type": ConversationConstants.MSG_TYPE_PING}) except (WebSocketDisconnect, Exception): # pylint: disable=broad-exception-caught # Connection closed or errored — let the task exit silently. pass async def _websocket_endpoint(self, websocket: WebSocket) -> None: - """Main WebSocket endpoint for the invocation protocol. + """Main WebSocket endpoint for the conversation protocol. Accepts a WebSocket connection and processes JSON messages in a loop. Each message must contain an ``action`` field. 
@@ -397,34 +394,34 @@ async def _websocket_endpoint(self, websocket: WebSocket) -> None: message = json.loads(raw) except (json.JSONDecodeError, ValueError): await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, + "type": ConversationConstants.MSG_TYPE_ERROR, "error": {"code": "invalid_json", "message": "Invalid JSON message"}, }) continue if not isinstance(message, dict): await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, + "type": ConversationConstants.MSG_TYPE_ERROR, "error": {"code": "invalid_message", "message": "Message must be a JSON object"}, }) continue action = message.get("action") - if action == InvocationConstants.ACTION_INVOKE: + if action == ConversationConstants.ACTION_INVOKE: await self._handle_ws_invoke(websocket, message) - elif action == InvocationConstants.ACTION_GET_INVOCATION: - await self._handle_ws_get_invocation(websocket, message) - elif action == InvocationConstants.ACTION_CANCEL_INVOCATION: - await self._handle_ws_cancel_invocation(websocket, message) - elif action == InvocationConstants.ACTION_PING: + elif action == ConversationConstants.ACTION_GET_CONVERSATION: + await self._handle_ws_get_conversation(websocket, message) + elif action == ConversationConstants.ACTION_CANCEL_CONVERSATION: + await self._handle_ws_cancel_conversation(websocket, message) + elif action == ConversationConstants.ACTION_PING: # Client-initiated ping — respond with pong. - await websocket.send_json({"type": InvocationConstants.MSG_TYPE_PONG}) - elif action == InvocationConstants.ACTION_PONG: + await websocket.send_json({"type": ConversationConstants.MSG_TYPE_PONG}) + elif action == ConversationConstants.ACTION_PONG: # Client pong response — no-op, already kept connection alive. 
pass else: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, + "type": ConversationConstants.MSG_TYPE_ERROR, "error": { "code": "invalid_action", "message": f"Unknown action: {action}", @@ -446,23 +443,23 @@ async def _websocket_endpoint(self, websocket: WebSocket) -> None: async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) -> None: generated_id = str(uuid.uuid4()) - raw_invocation_id = message.get("invocation_id") or "" - invocation_id = _sanitize_id(raw_invocation_id, generated_id) + raw_conversation_id = message.get("conversation_id") or "" + conversation_id = _sanitize_id(raw_conversation_id, generated_id) raw_session_id = ( message.get("session_id") - or os.environ.get(Constants.FOUNDRY_AGENT_SESSION_ID) + or os.environ.get("FOUNDRY_AGENT_SESSION_ID") or "" ) session_id = _sanitize_id(raw_session_id, str(uuid.uuid4())) - context = InvocationContext(invocation_id=invocation_id, session_id=session_id) + context = ConversationContext(conversation_id=conversation_id, session_id=session_id) payload = message.get("payload", {}) if self._invoke_fn is None: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "session_id": session_id, "error": { "code": "not_implemented", @@ -472,12 +469,12 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) return with self._request_span( - websocket.headers, invocation_id, "invoke_agent", + websocket.headers, conversation_id, "invoke_agent", operation_name="invoke_agent", session_id=session_id, ) as otel_span: self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, - InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + ConversationConstants.ATTR_SPAN_CONVERSATION_ID: conversation_id, + ConversationConstants.ATTR_SPAN_SESSION_ID: session_id, }) try: @@ -485,176 
+482,171 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) # Streaming response async for chunk in self._invoke_fn(payload, context): await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_STREAM_CHUNK, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_STREAM_CHUNK, + "conversation_id": conversation_id, "session_id": session_id, "payload": chunk, }) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_STREAM_END, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_STREAM_END, + "conversation_id": conversation_id, "session_id": session_id, }) else: # Non-streaming response result = await self._invoke_fn(payload, context) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_RESULT, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_RESULT, + "conversation_id": conversation_id, "session_id": session_id, "payload": result, }) - except InvocationError as exc: + except ConversationError as exc: self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_ERROR_CODE: exc.code, - InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, + ConversationConstants.ATTR_SPAN_ERROR_CODE: exc.code, + ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, }) - if self._tracing is not None: - self._tracing.end_span(otel_span, exc=exc) - logger.error("Invocation %s failed: %s", invocation_id, exc) + end_span(otel_span, exc=exc) + logger.error("Conversation %s failed: %s", conversation_id, exc) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "session_id": session_id, "error": {"code": exc.code, "message": exc.message}, }) return except Exception as exc: # pylint: disable=broad-exception-caught self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", 
- InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + ConversationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) - if self._tracing is not None: - self._tracing.end_span(otel_span, exc=exc) - logger.error("Error processing invocation %s: %s", invocation_id, exc, exc_info=True) + end_span(otel_span, exc=exc) + logger.error("Error processing conversation %s: %s", conversation_id, exc, exc_info=True) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "session_id": session_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) return # Success — end span - if self._tracing is not None: - self._tracing.end_span(otel_span) + end_span(otel_span) # ------------------------------------------------------------------ - # Get-invocation handler + # Get-conversation handler # ------------------------------------------------------------------ - async def _handle_ws_get_invocation(self, websocket: WebSocket, message: dict[str, Any]) -> None: - invocation_id = message.get("invocation_id") or "" - if not invocation_id: + async def _handle_ws_get_conversation(self, websocket: WebSocket, message: dict[str, Any]) -> None: + conversation_id = message.get("conversation_id") or "" + if not conversation_id: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "error": {"code": "invalid_request", "message": "invocation_id is required"}, + "type": ConversationConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "conversation_id is required"}, }) return session_id = message.get("session_id") or "" - context = InvocationContext(invocation_id=invocation_id, session_id=session_id) + context = ConversationContext(conversation_id=conversation_id, session_id=session_id) with self._simple_request_span( - 
websocket.headers, invocation_id, "get_invocation", + websocket.headers, conversation_id, "get_conversation", session_id=session_id, ) as otel_span: self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, - InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + ConversationConstants.ATTR_SPAN_CONVERSATION_ID: conversation_id, + ConversationConstants.ATTR_SPAN_SESSION_ID: session_id, }) - if self._get_invocation_fn is None: + if self._get_conversation_fn is None: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, - "error": {"code": "not_found", "message": "get_invocation not implemented"}, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, + "error": {"code": "not_found", "message": "get_conversation not implemented"}, }) return try: - result = await self._get_invocation_fn(context) + result = await self._get_conversation_fn(context) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_RESULT, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_RESULT, + "conversation_id": conversation_id, "payload": result, }) - except InvocationError as exc: + except ConversationError as exc: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "error": {"code": exc.code, "message": exc.message}, }) except Exception as exc: # pylint: disable=broad-exception-caught self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + ConversationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) - if self._tracing is not None: - self._tracing.record_error(otel_span, exc) - logger.error("Error in get_invocation %s: %s", 
invocation_id, exc, exc_info=True) + record_error(otel_span, exc) + logger.error("Error in get_conversation %s: %s", conversation_id, exc, exc_info=True) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) # ------------------------------------------------------------------ - # Cancel-invocation handler + # Cancel-conversation handler # ------------------------------------------------------------------ - async def _handle_ws_cancel_invocation(self, websocket: WebSocket, message: dict[str, Any]) -> None: - invocation_id = message.get("invocation_id") or "" - if not invocation_id: + async def _handle_ws_cancel_conversation(self, websocket: WebSocket, message: dict[str, Any]) -> None: + conversation_id = message.get("conversation_id") or "" + if not conversation_id: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "error": {"code": "invalid_request", "message": "invocation_id is required"}, + "type": ConversationConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "conversation_id is required"}, }) return session_id = message.get("session_id") or "" - context = InvocationContext(invocation_id=invocation_id, session_id=session_id) + context = ConversationContext(conversation_id=conversation_id, session_id=session_id) with self._simple_request_span( - websocket.headers, invocation_id, "cancel_invocation", + websocket.headers, conversation_id, "cancel_conversation", session_id=session_id, ) as otel_span: self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, - InvocationConstants.ATTR_SPAN_SESSION_ID: session_id, + ConversationConstants.ATTR_SPAN_CONVERSATION_ID: conversation_id, + ConversationConstants.ATTR_SPAN_SESSION_ID: session_id, }) - if 
self._cancel_invocation_fn is None: + if self._cancel_conversation_fn is None: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, - "error": {"code": "not_found", "message": "cancel_invocation not implemented"}, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, + "error": {"code": "not_found", "message": "cancel_conversation not implemented"}, }) return try: - result = await self._cancel_invocation_fn(context) + result = await self._cancel_conversation_fn(context) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_RESULT, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_RESULT, + "conversation_id": conversation_id, "payload": result, }) - except InvocationError as exc: + except ConversationError as exc: await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "error": {"code": exc.code, "message": exc.message}, }) except Exception as exc: # pylint: disable=broad-exception-caught self._safe_set_attrs(otel_span, { - InvocationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - InvocationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + ConversationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) - if self._tracing is not None: - self._tracing.record_error(otel_span, exc) - logger.error("Error in cancel_invocation %s: %s", invocation_id, exc, exc_info=True) + record_error(otel_span, exc) + logger.error("Error in cancel_conversation %s: %s", conversation_id, exc, exc_info=True) await websocket.send_json({ - "type": InvocationConstants.MSG_TYPE_ERROR, - "invocation_id": invocation_id, + "type": ConversationConstants.MSG_TYPE_ERROR, + "conversation_id": conversation_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) 
diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_version.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_version.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/_version.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_version.py diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/py.typed b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/py.typed similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/invocations/py.typed rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json index 5858cd8e195b..10b6a26672a2 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json +++ b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json @@ -7,7 +7,7 @@ "caplog", "genai", "hypercorn", - "invocations", + "conversations", "openapi", "paramtype", "pytestmark", diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml index 2427a5757164..8673bbe5d5b2 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml @@ -1,7 +1,7 @@ [project] -name = "azure-ai-agentserver-invocations" +name = "azure-ai-agentserver-websocket" dynamic = ["version", "readme"] -description = "Invocations protocol for Azure AI Hosted Agents" +description = "Conversations protocol for Azure AI Hosted Agents" requires-python = ">=3.10" authors = [ { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, @@ -18,7 +18,7 @@ 
classifiers = [ "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", ] -keywords = ["azure", "azure sdk", "agent", "agentserver", "invocations"] +keywords = ["azure", "azure sdk", "agent", "agentserver", "conversations", "websocket"] dependencies = [ "azure-ai-agentserver-core>=2.0.0b1", @@ -42,11 +42,11 @@ exclude = [ ] [tool.setuptools.dynamic] -version = { attr = "azure.ai.agentserver.invocations._version.VERSION" } +version = { attr = "azure.ai.agentserver.conversations._version.VERSION" } readme = { file = ["README.md"], content-type = "text/markdown" } [tool.setuptools.package-data] -"azure.ai.agentserver.invocations" = ["py.typed"] +"azure.ai.agentserver.conversations" = ["py.typed"] [tool.ruff] line-length = 120 @@ -56,7 +56,7 @@ lint.ignore = [] fix = false [tool.ruff.lint.isort] -known-first-party = ["azure.ai.agentserver.invocations"] +known-first-party = ["azure.ai.agentserver.conversations"] combine-as-imports = true [tool.azure-sdk-build] diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/client.py similarity index 90% rename from sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py rename to sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/client.py index 4bf16cff3b9e..93fee55a2d4f 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/serve_browser_client.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/client.py @@ -2,8 +2,8 @@ Usage:: - python serve_browser_client.py - python serve_browser_client.py --port 3000 + python client.py + python client.py --port 3000 """ import argparse import http.server diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html 
index b05e37ca2944..b20a542347aa 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html @@ -3,7 +3,7 @@ -Echo Agent WebSocket Client +Echo Agent Client
-

Echo Agent WebSocket Client

+

Echo Agent Client

+
+ + + + +
- + Disconnected @@ -62,9 +73,44 @@

Echo Agent WebSocket Client

const chat = document.getElementById("chat"); const messageInput = document.getElementById("messageInput"); const sendBtn = document.getElementById("sendBtn"); + const modeWs = document.getElementById("modeWs"); + const modeHttp = document.getElementById("modeHttp"); let ws = null; let currentAgentBubble = null; + let httpAbortController = null; + + function getMode() { + return modeHttp.checked ? "http" : "websocket"; + } + + // --- Mode switching --- + function onModeChange() { + // Disconnect any existing WebSocket + if (ws) { ws.close(); ws = null; } + cleanup(); + + if (getMode() === "http") { + urlInput.value = "http://localhost:8088/conversations"; + urlInput.placeholder = "http://host:port/conversations"; + connectBtn.style.display = "none"; + disconnectBtn.style.display = "none"; + setStatus("connected"); + setInputEnabled(true); + addMessage("Switched to HTTP (SSE) mode", "system"); + } else { + urlInput.value = "ws://localhost:8088/conversations/ws"; + urlInput.placeholder = "ws://host:port/conversations/ws"; + connectBtn.style.display = ""; + disconnectBtn.style.display = ""; + setStatus("disconnected"); + setInputEnabled(false); + addMessage("Switched to WebSocket mode", "system"); + } + } + + modeWs.addEventListener("change", onModeChange); + modeHttp.addEventListener("change", onModeChange); function setStatus(state) { statusDot.className = "dot " + state; @@ -86,7 +132,9 @@

Echo Agent WebSocket Client

if (enabled) messageInput.focus(); } + // --- WebSocket --- function doConnect() { + if (getMode() !== "websocket") return; const url = urlInput.value.trim(); if (!url) return; setStatus("connecting"); @@ -158,22 +206,108 @@

Echo Agent WebSocket Client

function cleanup() { ws = null; currentAgentBubble = null; - setStatus("disconnected"); - connectBtn.disabled = false; - disconnectBtn.disabled = true; + if (getMode() === "websocket") { + setStatus("disconnected"); + connectBtn.disabled = false; + disconnectBtn.disabled = true; + setInputEnabled(false); + } + } + + // --- HTTP SSE --- + async function doHttpSend(text) { + const url = urlInput.value.trim(); + if (!url) return; + setInputEnabled(false); + currentAgentBubble = null; + httpAbortController = new AbortController(); + + try { + const response = await fetch(url, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ message: text }), + signal: httpAbortController.signal, + }); + + if (!response.ok) { + addMessage("HTTP error: " + response.status, "system"); + setInputEnabled(true); + return; + } + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + let buffer = ""; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + buffer += decoder.decode(value, { stream: true }); + + // Parse SSE lines + const lines = buffer.split("\n"); + buffer = lines.pop(); // keep incomplete line in buffer + + let eventType = null; + for (const line of lines) { + if (line.startsWith("event: ")) { + eventType = line.slice(7).trim(); + } else if (line.startsWith("data: ")) { + const data = line.slice(6); + if (eventType === "done") { + currentAgentBubble = null; + addMessage("— stream complete —", "system"); + setInputEnabled(true); + return; + } + try { + const parsed = JSON.parse(data); + const token = parsed.token || ""; + if (!currentAgentBubble) { + currentAgentBubble = addMessage("", "agent"); + } + currentAgentBubble.textContent += token + " "; + chat.scrollTop = chat.scrollHeight; + } catch { /* ignore parse errors */ } + eventType = null; + } else if (line === "") { + eventType = null; + } + } + } + + // Stream ended without explicit done event + if 
(currentAgentBubble) { + currentAgentBubble = null; + addMessage("— stream complete —", "system"); + } + } catch (err) { + if (err.name !== "AbortError") { + addMessage("Request error: " + err.message, "system"); + } + } + setInputEnabled(true); } + // --- Send (both modes) --- function doSend() { const text = messageInput.value.trim(); - if (!text || !ws || ws.readyState !== WebSocket.OPEN) return; + if (!text) return; addMessage(text, "user"); - ws.send(JSON.stringify({ - action: "invoke", - payload: { message: text } - })); messageInput.value = ""; - setInputEnabled(false); + + if (getMode() === "http") { + doHttpSend(text); + } else { + if (!ws || ws.readyState !== WebSocket.OPEN) return; + ws.send(JSON.stringify({ + action: "invoke", + payload: { message: text } + })); + setInputEnabled(false); + } } connectBtn.addEventListener("click", doConnect); diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md index b42c95bc4447..be09ab4294f5 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md @@ -1,12 +1,18 @@ **IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. 
-# Echo Agent — Invocations Protocol (WebSocket Streaming) +# Echo Agent — Conversations Protocol (WebSocket + HTTP SSE Streaming) -This sample demonstrates a minimal echo agent built with [azure-ai-agentserver-invocations](https://pypi.org/project/azure-ai-agentserver-invocations/) that streams responses word-by-word using WebSocket. +This sample demonstrates a minimal echo agent built with [azure-ai-agentserver-conversations](https://pypi.org/project/azure-ai-agentserver-conversations/) that streams responses word-by-word. It supports **two communication modes**: + +- **WebSocket** — persistent connection at `ws://localhost:8088/conversations/ws` +- **HTTP SSE** — stateless POST at `http://localhost:8088/conversations` ## How It Works -The agent receives user input via the Invocations protocol over WebSocket (`ws://localhost:8088/invocations/ws`) and echoes it back with a `🔊 Echo:` prefix. Each word is streamed as a separate `stream_chunk` message, followed by a final `stream_end` signal. +The agent receives user input and echoes it back with a `🔊 Echo:` prefix. Each word is streamed as a separate token chunk. + +- **WebSocket mode**: tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. +- **HTTP SSE mode**: tokens are sent as `data:` lines per the Server-Sent Events spec, followed by an `event: done` signal. ## Running Locally @@ -29,22 +35,15 @@ python main.py The agent starts on `http://localhost:8088/`. -### Test - -Using the included client: +### Test with WebSocket -```bash -python streaming_client.py -python streaming_client.py --message "Hello world!" 
-``` - -Or using the `websockets` library directly: +Using the `websockets` library: ```python import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"message": "Hello world!"} @@ -63,6 +62,27 @@ async def main(): asyncio.run(main()) ``` +### Test with HTTP SSE + +Using `curl`: + +```bash +curl -N -X POST http://localhost:8088/conversations \ + -H "Content-Type: application/json" \ + -d '{"message": "Hello world!"}' +``` + +### Browser Client + +A browser-based client with a WebSocket/HTTP mode switcher is available under `../browser_client/`: + +```bash +cd ../browser_client +python client.py +``` + +Then open `http://localhost:8080` and use the toggle to switch between WebSocket and HTTP (SSE) modes. + ## Deploying to Microsoft Foundry To deploy your agent to Microsoft Foundry, follow the deployment guide at https://github.com/microsoft/hosted-agents-vnext-private-preview/blob/main/azd-quickstart.md diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml index 5ad7ba61265b..b77578fb4ba2 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml @@ -1,16 +1,16 @@ -name: echo-agent-invocations-websocket-streaming +name: echo-agent-conversations-websocket-streaming description: > A simple echo agent that streams responses word-by-word using the - azure-ai-agentserver-invocations SDK with WebSocket streaming. + azure-ai-agentserver-conversations SDK with WebSocket streaming. 
metadata: tags: - AI Agent Hosting - Azure AI AgentServer - - Invocations Protocol + - Conversations Protocol - Streaming template: - name: echo-agent-invocations-streaming + name: echo-agent-conversations-streaming kind: hosted protocols: - - protocol: invocations + - protocol: conversations version: v0.0.1 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml index 7fe3fd65dca8..73bb8c93d51a 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml @@ -1,7 +1,7 @@ kind: hosted name: echo-agent-streaming protocols: - - protocol: invocations + - protocol: conversations version: v0.0.1 resources: cpu: "0.25" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py index 58ca066e95e7..df596a07c36e 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py @@ -1,20 +1,23 @@ # Copyright (c) Microsoft. All rights reserved. -"""Streaming echo agent using azure-ai-agentserver-invocations (WebSocket). +"""Streaming echo agent supporting both WebSocket and HTTP (SSE) conversations. -Echoes user input back as a WebSocket stream, -sending each word as a separate token chunk. +Echoes user input back as a stream, sending each word as a separate token chunk. 
+Supports two communication modes: + +- **WebSocket** at ``ws://localhost:8088/conversations/ws`` +- **HTTP SSE** at ``POST http://localhost:8088/conversations`` **Server** (this file):: python main.py -**Client** (using the ``websockets`` library):: +**WebSocket client** (using the ``websockets`` library):: import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: + async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"message": "Hello world!"} @@ -31,40 +34,102 @@ async def main(): break asyncio.run(main()) + +**HTTP SSE client** (using ``curl``):: + + curl -N -X POST http://localhost:8088/conversations \\ + -H "Content-Type: application/json" \\ + -d '{"message": "Hello world!"}' """ import asyncio +import json from collections.abc import AsyncGenerator -from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext +from starlette.requests import Request +from starlette.responses import Response, StreamingResponse +from starlette.routing import Route + +from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext ECHO_PREFIX = "🔊 Echo: " -app = InvocationAgentServerHost() +_CORS_HEADERS = { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "POST, OPTIONS", + "Access-Control-Allow-Headers": "Content-Type", +} + + +async def echo_tokens(message: str) -> AsyncGenerator[dict, None]: + """Yield token dicts with simulated latency (shared by both protocols). + + :param message: The user message to echo. 
+ :type message: str + """ + echo_text = f"{ECHO_PREFIX}{message}" + words = echo_text.split() + for word in words: + yield {"token": word} + await asyncio.sleep(0.1) # simulate token-by-token latency + + +# --------------------------------------------------------------------------- +# HTTP SSE conversation endpoint +# --------------------------------------------------------------------------- + + +async def handle_http_invoke(request: Request) -> Response: + """HTTP conversation endpoint — streams tokens as Server-Sent Events. + + :param request: The incoming HTTP request. + :type request: ~starlette.requests.Request + """ + if request.method == "OPTIONS": + return Response(status_code=204, headers=_CORS_HEADERS) + + data = await request.json() + message = data.get("message", "Hello! Send me a message and I'll echo it back.") + + async def generate_sse() -> AsyncGenerator[bytes, None]: + async for chunk in echo_tokens(message): + payload = json.dumps(chunk) + yield f"data: {payload}\n\n".encode() + yield b"event: done\ndata: {}\n\n" + + return StreamingResponse( + generate_sse(), + media_type="text/event-stream", + headers={**_CORS_HEADERS, "Cache-Control": "no-cache", "Connection": "keep-alive"}, + ) + + +# --------------------------------------------------------------------------- +# Application — WebSocket + HTTP routes +# --------------------------------------------------------------------------- + +app = ConversationAgentServerHost( + routes=[ + Route("/conversations", handle_http_invoke, methods=["POST", "OPTIONS"]), + ], +) @app.invoke_handler async def handle_invoke( - payload: dict, context: InvocationContext # pylint: disable=unused-argument + payload: dict, context: ConversationContext # pylint: disable=unused-argument ) -> AsyncGenerator[dict, None]: - """Yield token chunks with simulated latency. - - Each chunk is sent as a WebSocket ``stream_chunk`` message. - A final ``stream_end`` message signals completion (handled by the framework). 
+ """WebSocket streaming handler — each chunk is a ``stream_chunk`` message. :param payload: The client request payload. :type payload: dict - :param context: Invocation context with IDs. - :type context: InvocationContext + :param context: Conversation context with IDs. + :type context: ConversationContext """ message = payload.get( "message", "Hello! Send me a message and I'll echo it back.") - echo_text = f"{ECHO_PREFIX}{message}" - words = echo_text.split() - - for word in words: - yield {"token": word} - await asyncio.sleep(0.1) # simulate token-by-token latency + async for chunk in echo_tokens(message): + yield chunk if __name__ == "__main__": diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt index 5c6c36ed6d78..2db7962ad24d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt @@ -1,2 +1,2 @@ -azure-ai-agentserver-invocations +../../ websockets \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py deleted file mode 100644 index cf39ce5ccc15..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/serve_browser_client.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Serve the browser client on a local HTTP port. 
- -Usage:: - - python serve_browser_client.py - python serve_browser_client.py --port 3000 -""" -import argparse -import http.server -import os -import functools - - -def main() -> None: - parser = argparse.ArgumentParser(description="Serve browser client locally") - parser.add_argument("--port", type=int, default=8080, help="Port to serve on (default: 8080)") - args = parser.parse_args() - - directory = os.path.dirname(os.path.abspath(__file__)) - handler = functools.partial(http.server.SimpleHTTPRequestHandler, directory=directory) - - with http.server.HTTPServer(("", args.port), handler) as httpd: - print(f"Serving browser client at http://localhost:{args.port}/browser_client.html") - try: - httpd.serve_forever() - except KeyboardInterrupt: - print("\nStopped.") - - -if __name__ == "__main__": - main() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt deleted file mode 100644 index fe489ac8ac35..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -azure-ai-agentserver-invocations -websockets diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py deleted file mode 100644 index 568da5a41165..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/streaming_invoke_agent.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Streaming invoke agent example (WebSocket). - -Demonstrates returning results incrementally via WebSocket streaming. -Callers receive real-time partial output as tokens are generated. 
- -**Server** (this file):: - - python streaming_invoke_agent.py - -**Client** (using the ``websockets`` library):: - - import asyncio, json, websockets - - async def main(): - async with websockets.connect("ws://localhost:8088/invocations/ws") as ws: - await ws.send(json.dumps({ - "action": "invoke", - "payload": {"prompt": "Write a Calculator class with an Add method"} - })) - while True: - msg = json.loads(await ws.recv()) - if msg["type"] == "stream_chunk": - print(msg["payload"]["token"], end="", flush=True) - elif msg["type"] == "stream_end": - print("\\nDone!", flush=True) - break - elif msg["type"] == "error": - print(f"Error: {msg['error']}") - break - - asyncio.run(main()) -""" -import asyncio -from collections.abc import AsyncGenerator # pylint: disable=import-error - -from azure.ai.agentserver.invocations import InvocationAgentServerHost, InvocationContext - - -app = InvocationAgentServerHost() - -# Simulated tokens — in production these would come from a model. -_SIMULATED_TOKENS = [ - "class", " Calculator", ":", "\n", - " ", "def", " add", "(", "self", ",", " a", ",", " b", ")", ":", "\n", - " ", "return", " a", " +", " b", "\n", -] - - -@app.invoke_handler -async def handle_invoke( - payload: dict, context: InvocationContext # pylint: disable=unused-argument -) -> AsyncGenerator[dict, None]: - """Yield token chunks with simulated latency. - - Each chunk is sent as a WebSocket ``stream_chunk`` message. - A final ``stream_end`` message signals completion (handled by the framework). - - :param payload: The client request payload (unused in this demo). - :type payload: dict - :param context: Invocation context with IDs. 
- :type context: InvocationContext - """ - for token in _SIMULATED_TOKENS: - yield {"token": token} - await asyncio.sleep(0.15) # simulate model latency - - -if __name__ == "__main__": - app.run() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py index 5fdd0c258b23..0520817f3487 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py @@ -1,16 +1,16 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Shared fixtures and factory functions for invocations WebSocket tests.""" +"""Shared fixtures and factory functions for conversations WebSocket tests.""" from typing import Any import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, - InvocationError, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, + ConversationError, ) @@ -22,7 +22,7 @@ "openapi": "3.0.0", "info": {"title": "Echo Agent", "version": "1.0.0"}, "paths": { - "/invocations": { + "/conversations": { "post": { "requestBody": { "required": True, @@ -64,72 +64,72 @@ # --------------------------------------------------------------------------- -def _make_echo_agent(**kwargs: Any) -> InvocationAgentServerHost: - """Create an InvocationAgentServerHost whose invoke handler echoes the payload.""" - app = InvocationAgentServerHost(**kwargs) +def _make_echo_agent(**kwargs: Any) -> ConversationAgentServerHost: + """Create an ConversationAgentServerHost whose invoke handler echoes the payload.""" + app = ConversationAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: - return {"echo": 
payload, "invocation_id": context.invocation_id} + async def handle(payload: dict, context: ConversationContext) -> dict: + return {"echo": payload, "conversation_id": context.conversation_id} return app -def _make_streaming_agent(**kwargs: Any) -> InvocationAgentServerHost: - """Create an InvocationAgentServerHost whose invoke handler yields 3 JSON chunks.""" - app = InvocationAgentServerHost(**kwargs) +def _make_streaming_agent(**kwargs: Any) -> ConversationAgentServerHost: + """Create an ConversationAgentServerHost whose invoke handler yields 3 JSON chunks.""" + app = ConversationAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: InvocationContext): + async def handle(payload: dict, context: ConversationContext): for i in range(3): yield {"chunk": i} return app -def _make_async_storage_agent(**kwargs: Any) -> InvocationAgentServerHost: - """Create an InvocationAgentServerHost with get/cancel handlers and in-memory store.""" - app = InvocationAgentServerHost(**kwargs) +def _make_async_storage_agent(**kwargs: Any) -> ConversationAgentServerHost: + """Create an ConversationAgentServerHost with get/cancel handlers and in-memory store.""" + app = ConversationAgentServerHost(**kwargs) store: dict[str, dict] = {} @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: - store[context.invocation_id] = payload - return {"stored": True, "invocation_id": context.invocation_id} - - @app.get_invocation_handler - async def get_handler(context: InvocationContext) -> dict: - if context.invocation_id not in store: - raise InvocationError("not_found", "Not found") - return {"data": store[context.invocation_id]} - - @app.cancel_invocation_handler - async def cancel_handler(context: InvocationContext) -> dict: - if context.invocation_id not in store: - raise InvocationError("not_found", "Not found") - del store[context.invocation_id] + async def handle(payload: dict, context: ConversationContext) -> dict: + 
store[context.conversation_id] = payload + return {"stored": True, "conversation_id": context.conversation_id} + + @app.get_conversation_handler + async def get_handler(context: ConversationContext) -> dict: + if context.conversation_id not in store: + raise ConversationError("not_found", "Not found") + return {"data": store[context.conversation_id]} + + @app.cancel_conversation_handler + async def cancel_handler(context: ConversationContext) -> dict: + if context.conversation_id not in store: + raise ConversationError("not_found", "Not found") + del store[context.conversation_id] return {"status": "cancelled"} return app -def _make_validated_agent() -> InvocationAgentServerHost: - """Create an InvocationAgentServerHost with OpenAPI spec.""" - app = InvocationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) +def _make_validated_agent() -> ConversationAgentServerHost: + """Create an ConversationAgentServerHost with OpenAPI spec.""" + app = ConversationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"reply": f"echo: {payload['message']}"} return app -def _make_failing_agent(**kwargs: Any) -> InvocationAgentServerHost: - """Create an InvocationAgentServerHost whose handler raises ValueError.""" - app = InvocationAgentServerHost(**kwargs) +def _make_failing_agent(**kwargs: Any) -> ConversationAgentServerHost: + """Create an ConversationAgentServerHost whose handler raises ValueError.""" + app = ConversationAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: raise ValueError("something went wrong") return app diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py 
b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py index 62db1498e3f3..7d71f20b70b2 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py @@ -1,13 +1,13 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for decorator-based handler registration on InvocationAgentServerHost.""" +"""Tests for decorator-based handler registration on ConversationAgentServerHost.""" from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, - InvocationError, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, + ConversationError, ) @@ -17,10 +17,10 @@ def test_invoke_handler_stores_function(): """@app.invoke_handler stores the function on the protocol object.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} assert app._invoke_fn is handle @@ -32,9 +32,9 @@ async def handle(payload: dict, context: InvocationContext) -> dict: def test_invoke_handler_returns_original_function(): """@app.invoke_handler returns the original function.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} result = app.invoke_handler(handle) @@ -42,33 +42,33 @@ async def handle(payload: dict, context: InvocationContext) -> dict: # 
--------------------------------------------------------------------------- -# get_invocation_handler stores function +# get_conversation_handler stores function # --------------------------------------------------------------------------- -def test_get_invocation_handler_stores_function(): - """@app.get_invocation_handler stores the function.""" - app = InvocationAgentServerHost() +def test_get_conversation_handler_stores_function(): + """@app.get_conversation_handler stores the function.""" + app = ConversationAgentServerHost() - @app.get_invocation_handler - async def get_handler(context: InvocationContext) -> dict: + @app.get_conversation_handler + async def get_handler(context: ConversationContext) -> dict: return {"ok": True} - assert app._get_invocation_fn is get_handler + assert app._get_conversation_fn is get_handler # --------------------------------------------------------------------------- -# cancel_invocation_handler stores function +# cancel_conversation_handler stores function # --------------------------------------------------------------------------- -def test_cancel_invocation_handler_stores_function(): - """@app.cancel_invocation_handler stores the function.""" - app = InvocationAgentServerHost() +def test_cancel_conversation_handler_stores_function(): + """@app.cancel_conversation_handler stores the function.""" + app = ConversationAgentServerHost() - @app.cancel_invocation_handler - async def cancel_handler(context: InvocationContext) -> dict: + @app.cancel_conversation_handler + async def cancel_handler(context: ConversationContext) -> dict: return {"ok": True} - assert app._cancel_invocation_fn is cancel_handler + assert app._cancel_conversation_fn is cancel_handler # --------------------------------------------------------------------------- @@ -77,7 +77,7 @@ async def cancel_handler(context: InvocationContext) -> dict: def test_shutdown_handler_stores_function(): """@server.shutdown_handler stores the function on the server.""" - app = 
InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.shutdown_handler async def on_shutdown(): @@ -92,49 +92,49 @@ async def on_shutdown(): def test_full_request_flow(): """Full lifecycle: invoke → get → cancel → get (not_found).""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() store: dict[str, dict] = {} @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: - store[context.invocation_id] = payload + async def handle(payload: dict, context: ConversationContext) -> dict: + store[context.conversation_id] = payload return {"stored": True} - @app.get_invocation_handler - async def get_handler(context: InvocationContext) -> dict: - if context.invocation_id not in store: - raise InvocationError("not_found", "Not found") - return {"data": store[context.invocation_id]} - - @app.cancel_invocation_handler - async def cancel_handler(context: InvocationContext) -> dict: - if context.invocation_id not in store: - raise InvocationError("not_found", "Not found") - del store[context.invocation_id] + @app.get_conversation_handler + async def get_handler(context: ConversationContext) -> dict: + if context.conversation_id not in store: + raise ConversationError("not_found", "Not found") + return {"data": store[context.conversation_id]} + + @app.cancel_conversation_handler + async def cancel_handler(context: ConversationContext) -> dict: + if context.conversation_id not in store: + raise ConversationError("not_found", "Not found") + del store[context.conversation_id] return {"status": "cancelled"} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: # Invoke ws.send_json({"action": "invoke", "payload": {"key": "lifecycle-test"}}) invoke_resp = ws.receive_json() assert invoke_resp["type"] == "result" - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] # Get - ws.send_json({"action": 
"get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert get_resp["payload"]["data"]["key"] == "lifecycle-test" # Cancel - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" assert cancel_resp["payload"]["status"] == "cancelled" # Get after cancel - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp2 = ws.receive_json() assert get_resp2["type"] == "error" assert get_resp2["error"]["code"] == "not_found" @@ -146,9 +146,9 @@ async def cancel_handler(context: InvocationContext) -> dict: def test_missing_invoke_handler_returns_error(): """Invoke without registered handler returns not_implemented error.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -156,32 +156,32 @@ def test_missing_invoke_handler_returns_error(): def test_missing_get_handler_returns_error(): - """get_invocation without registered handler returns not_found error.""" - app = InvocationAgentServerHost() + """get_conversation without registered handler returns not_found error.""" + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "get_invocation", 
"invocation_id": "some-id"}) + with client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "get_conversation", "conversation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" def test_missing_cancel_handler_returns_error(): - """cancel_invocation without registered handler returns not_found error.""" - app = InvocationAgentServerHost() + """cancel_conversation without registered handler returns not_found error.""" + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) + with client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "cancel_conversation", "conversation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -193,17 +193,17 @@ async def handle(payload: dict, context: InvocationContext) -> dict: def test_optional_handlers_default_none(): """Get and cancel handlers default to None.""" - app = InvocationAgentServerHost() - assert app._get_invocation_fn is None - assert app._cancel_invocation_fn is None + app = ConversationAgentServerHost() + assert app._get_conversation_fn is None + assert app._cancel_conversation_fn is None def test_optional_handler_override(): """Setting an optional handler replaces None.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() - @app.get_invocation_handler - async def get_handler(context: InvocationContext) -> dict: + @app.get_conversation_handler + async def get_handler(context: ConversationContext) -> dict: return {"ok": True} - assert app._get_invocation_fn is not None + assert app._get_conversation_fn 
is not None diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py index 87ae3040c26b..c7371fb3887f 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py @@ -1,14 +1,14 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Edge-case tests for InvocationAgentServerHost over WebSocket.""" +"""Edge-case tests for ConversationAgentServerHost over WebSocket.""" import uuid from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) @@ -18,7 +18,7 @@ def test_unknown_action_returns_error(echo_client): """Sending an unknown action returns an error.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "unknown_action", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -31,7 +31,7 @@ def test_unknown_action_returns_error(echo_client): def test_invalid_json_returns_error(echo_client): """Sending invalid JSON returns an error but connection stays open.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_text("not valid json {{{") resp = ws.receive_json() assert resp["type"] == "error" @@ -45,7 +45,7 @@ def test_invalid_json_returns_error(echo_client): def test_non_object_json_returns_error(echo_client): """Sending a JSON array instead of object returns an error.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with 
echo_client.websocket_connect("/conversations/ws") as ws: ws.send_text("[1, 2, 3]") resp = ws.receive_json() assert resp["type"] == "error" @@ -53,33 +53,33 @@ def test_non_object_json_returns_error(echo_client): # --------------------------------------------------------------------------- -# Invocation ID handling +# Conversation ID handling # --------------------------------------------------------------------------- -def test_invocation_id_auto_generated(echo_client): - """Invocation ID is auto-generated when not provided.""" - with echo_client.websocket_connect("/invocations/ws") as ws: +def test_conversation_id_auto_generated(echo_client): + """Conversation ID is auto-generated when not provided.""" + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "invocation_id" in resp - uuid.UUID(resp["invocation_id"]) + assert "conversation_id" in resp + uuid.UUID(resp["conversation_id"]) -def test_invocation_id_accepted_from_message(echo_client): - """Server accepts invocation ID from message field.""" +def test_conversation_id_accepted_from_message(echo_client): + """Server accepts conversation ID from message field.""" custom_id = str(uuid.uuid4()) - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "invoke", "invocation_id": custom_id, "payload": {}}) + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "invoke", "conversation_id": custom_id, "payload": {}}) resp = ws.receive_json() - assert resp["invocation_id"] == custom_id + assert resp["conversation_id"] == custom_id -def test_invocation_id_generated_when_empty(echo_client): - """When empty invocation ID is sent, server generates one.""" - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "invoke", "invocation_id": "", "payload": {}}) +def test_conversation_id_generated_when_empty(echo_client): + """When 
empty conversation ID is sent, server generates one.""" + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "invoke", "conversation_id": "", "payload": {}}) resp = ws.receive_json() - inv_id = resp["invocation_id"] + inv_id = resp["conversation_id"] uuid.UUID(inv_id) @@ -90,7 +90,7 @@ def test_invocation_id_generated_when_empty(echo_client): def test_large_payload(echo_client): """Large payload (dict with big value) is handled correctly.""" big_value = "x" * (1024 * 1024) - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"data": big_value}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -100,7 +100,7 @@ def test_large_payload(echo_client): def test_unicode_payload(echo_client): """Unicode payload is preserved.""" text = "Hello, 世界! 🌍" - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"text": text}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -113,48 +113,48 @@ def test_unicode_payload(echo_client): def test_empty_streaming(): """Empty streaming response (no chunks) sends only stream_end.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext): + async def handle(payload: dict, context: ConversationContext): return yield # noqa: E501 — make it a generator client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "stream_end" -def test_streaming_has_invocation_id(): - """Streaming messages include invocation_id.""" - app = InvocationAgentServerHost() +def 
test_streaming_has_conversation_id(): + """Streaming messages include conversation_id.""" + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext): + async def handle(payload: dict, context: ConversationContext): yield {"chunk": "data"} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "invocation_id" in resp + assert "conversation_id" in resp # --------------------------------------------------------------------------- -# Invocation lifecycle +# Conversation lifecycle # --------------------------------------------------------------------------- def test_multiple_gets(async_storage_client): - """Multiple gets for the same invocation return the same result.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: + """Multiple gets for the same conversation return the same result.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "multi-get"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] for _ in range(3): - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert get_resp["payload"]["data"]["key"] == "multi-get" @@ -162,16 +162,16 @@ def test_multiple_gets(async_storage_client): def test_double_cancel(async_storage_client): """Cancelling twice: second cancel returns error.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "cancel-twice"}}) invoke_resp = 
ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) cancel1 = ws.receive_json() assert cancel1["type"] == "result" - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) cancel2 = ws.receive_json() assert cancel2["type"] == "error" assert cancel2["error"]["code"] == "not_found" @@ -179,54 +179,54 @@ def test_double_cancel(async_storage_client): def test_invoke_cancel_get(async_storage_client): """Invoke -> cancel -> get returns not_found error.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "icg"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) ws.receive_json() # consume cancel response - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "error" assert get_resp["error"]["code"] == "not_found" # --------------------------------------------------------------------------- -# Multiple sequential invocations on same connection +# Multiple sequential conversations on same connection # --------------------------------------------------------------------------- -def test_multiple_sequential_invocations(echo_client): - """Multiple sequential invocations on the same WebSocket connection.""" - with echo_client.websocket_connect("/invocations/ws") as ws: +def 
test_multiple_sequential_conversations(echo_client): + """Multiple sequential conversations on the same WebSocket connection.""" + with echo_client.websocket_connect("/conversations/ws") as ws: ids = set() for i in range(10): ws.send_json({"action": "invoke", "payload": {"idx": i}}) resp = ws.receive_json() assert resp["type"] == "result" assert resp["payload"]["echo"]["idx"] == i - ids.add(resp["invocation_id"]) + ids.add(resp["conversation_id"]) assert len(ids) == 10 # --------------------------------------------------------------------------- -# get/cancel without invocation_id +# get/cancel without conversation_id # --------------------------------------------------------------------------- -def test_get_without_invocation_id(echo_client): - """get_invocation without invocation_id returns error.""" - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "get_invocation"}) +def test_get_without_conversation_id(echo_client): + """get_conversation without conversation_id returns error.""" + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "get_conversation"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "invalid_request" -def test_cancel_without_invocation_id(echo_client): - """cancel_invocation without invocation_id returns error.""" - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "cancel_invocation"}) +def test_cancel_without_conversation_id(echo_client): + """cancel_conversation without conversation_id returns error.""" + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "cancel_conversation"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "invalid_request" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py index 
3a2f707627cd..cdc7d64377f1 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py @@ -1,13 +1,13 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for get_invocation and cancel_invocation actions over WebSocket.""" +"""Tests for get_conversation and cancel_conversation actions over WebSocket.""" from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, - InvocationError, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, + ConversationError, ) @@ -16,13 +16,13 @@ # --------------------------------------------------------------------------- def test_get_after_invoke_returns_stored_result(async_storage_client): - """get_invocation after invoke returns the stored result.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: + """get_conversation after invoke returns the stored result.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "stored-data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" @@ -34,9 +34,9 @@ def test_get_after_invoke_returns_stored_result(async_storage_client): # --------------------------------------------------------------------------- def test_get_unknown_id_returns_error(async_storage_client): - """get_invocation with unknown ID returns error.""" - with 
async_storage_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "get_invocation", "invocation_id": "unknown-id-12345"}) + """get_conversation with unknown ID returns error.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "get_conversation", "conversation_id": "unknown-id-12345"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -47,13 +47,13 @@ def test_get_unknown_id_returns_error(async_storage_client): # --------------------------------------------------------------------------- def test_cancel_after_invoke_returns_cancelled(async_storage_client): - """cancel_invocation after invoke returns cancelled status.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: + """cancel_conversation after invoke returns cancelled status.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "cancel-me"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" @@ -65,9 +65,9 @@ def test_cancel_after_invoke_returns_cancelled(async_storage_client): # --------------------------------------------------------------------------- def test_cancel_unknown_id_returns_error(async_storage_client): - """cancel_invocation with unknown ID returns error.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "cancel_invocation", "invocation_id": "unknown-id-12345"}) + """cancel_conversation with unknown ID returns error.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "cancel_conversation", 
"conversation_id": "unknown-id-12345"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -78,16 +78,16 @@ def test_cancel_unknown_id_returns_error(async_storage_client): # --------------------------------------------------------------------------- def test_get_after_cancel_returns_error(async_storage_client): - """get_invocation after cancel returns error (data removed).""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: + """get_conversation after cancel returns error (data removed).""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "temp"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) ws.receive_json() # consume cancel response - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "error" @@ -98,21 +98,21 @@ def test_get_after_cancel_returns_error(async_storage_client): # GET error returns internal_error # --------------------------------------------------------------------------- -def test_get_invocation_error_returns_internal_error(): - """get_invocation handler raising an exception returns internal_error.""" - app = InvocationAgentServerHost() +def test_get_conversation_error_returns_internal_error(): + """get_conversation handler raising an exception returns internal_error.""" + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} - @app.get_invocation_handler - async def 
get_handler(context: InvocationContext) -> dict: + @app.get_conversation_handler + async def get_handler(context: ConversationContext) -> dict: raise RuntimeError("get failed") client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) + with client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "get_conversation", "conversation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "internal_error" @@ -122,21 +122,21 @@ async def get_handler(context: InvocationContext) -> dict: # Cancel error returns internal_error # --------------------------------------------------------------------------- -def test_cancel_invocation_error_returns_internal_error(): - """cancel_invocation handler raising an exception returns internal_error.""" - app = InvocationAgentServerHost() +def test_cancel_conversation_error_returns_internal_error(): + """cancel_conversation handler raising an exception returns internal_error.""" + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} - @app.cancel_invocation_handler - async def cancel_handler(context: InvocationContext) -> dict: + @app.cancel_conversation_handler + async def cancel_handler(context: ConversationContext) -> dict: raise RuntimeError("cancel failed") client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) + with client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "cancel_conversation", "conversation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "internal_error" diff --git 
a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py index fd812188f077..a531d67e3662 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py @@ -1,16 +1,16 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for graceful shutdown with InvocationAgentServerHost.""" +"""Tests for graceful shutdown with ConversationAgentServerHost.""" import asyncio import logging import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) @@ -18,13 +18,13 @@ # Helpers # --------------------------------------------------------------------------- -def _make_server_with_shutdown(**kwargs) -> tuple[InvocationAgentServerHost, list]: - """Create InvocationAgentServerHost with a tracked shutdown handler.""" - server = InvocationAgentServerHost(**kwargs) +def _make_server_with_shutdown(**kwargs) -> tuple[ConversationAgentServerHost, list]: + """Create ConversationAgentServerHost with a tracked shutdown handler.""" + server = ConversationAgentServerHost(**kwargs) calls: list[str] = [] @server.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} @server.shutdown_handler @@ -46,10 +46,10 @@ def test_shutdown_handler_registered(): def test_shutdown_handler_not_registered(): """Without @shutdown_handler, _shutdown_fn is None.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() 
@app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} assert app._shutdown_fn is None @@ -100,11 +100,11 @@ async def test_shutdown_handler_called_on_lifespan_exit(): @pytest.mark.asyncio async def test_shutdown_handler_timeout(caplog): """Shutdown handler that exceeds timeout is warned about.""" - server = InvocationAgentServerHost(graceful_shutdown_timeout=1) + server = ConversationAgentServerHost(graceful_shutdown_timeout=1) calls: list[str] = [] @server.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} @server.shutdown_handler @@ -126,10 +126,10 @@ async def on_shutdown(): @pytest.mark.asyncio async def test_shutdown_handler_exception(caplog): """Shutdown handler that raises is caught and logged.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} @app.shutdown_handler @@ -148,19 +148,19 @@ async def on_shutdown(): def test_default_graceful_shutdown_timeout(): """Default graceful shutdown timeout is 30 seconds.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() assert app._graceful_shutdown_timeout == 30 def test_custom_graceful_shutdown_timeout(): """Custom graceful_shutdown_timeout is stored.""" - server = InvocationAgentServerHost(graceful_shutdown_timeout=60) + server = ConversationAgentServerHost(graceful_shutdown_timeout=60) assert server._graceful_shutdown_timeout == 60 def test_zero_graceful_shutdown_timeout(): """Zero timeout disables the drain period.""" - server = InvocationAgentServerHost(graceful_shutdown_timeout=0) + server = 
ConversationAgentServerHost(graceful_shutdown_timeout=0) assert server._graceful_shutdown_timeout == 0 @@ -183,14 +183,14 @@ def test_health_endpoint_during_operation(): def test_no_shutdown_handler_is_noop(): """Without a shutdown handler, WebSocket and lifespan work fine.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -204,7 +204,7 @@ def test_multiple_requests_before_shutdown(): """Multiple requests can be served on the same WebSocket connection.""" server, _ = _make_server_with_shutdown() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: for i in range(5): ws.send_json({"action": "invoke", "payload": {"idx": i}}) resp = ws.receive_json() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py index e1cc091f9f89..827a4fc111aa 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py @@ -4,7 +4,7 @@ """Tests for the invoke action over WebSocket.""" import uuid -from azure.ai.agentserver.invocations import InvocationContext +from azure.ai.agentserver.conversations import ConversationContext # --------------------------------------------------------------------------- @@ -13,7 +13,7 @@ def test_invoke_echo_payload(echo_client): """Invoke echoes the payload back.""" - with echo_client.websocket_connect("/invocations/ws") as 
ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"msg": "hello world"}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -24,42 +24,42 @@ def test_invoke_echo_payload(echo_client): # IDs # --------------------------------------------------------------------------- -def test_invoke_returns_invocation_id(echo_client): - """Response includes a valid UUID invocation_id.""" - with echo_client.websocket_connect("/invocations/ws") as ws: +def test_invoke_returns_conversation_id(echo_client): + """Response includes a valid UUID conversation_id.""" + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "invocation_id" in resp - uuid.UUID(resp["invocation_id"]) + assert "conversation_id" in resp + uuid.UUID(resp["conversation_id"]) def test_invoke_returns_session_id(echo_client): """Response includes a valid UUID session_id.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "session_id" in resp uuid.UUID(resp["session_id"]) -def test_invoke_unique_invocation_ids(echo_client): - """Each invoke gets a unique invocation ID.""" +def test_invoke_unique_conversation_ids(echo_client): + """Each invoke gets a unique conversation ID.""" ids = set() - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: for _ in range(5): ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - ids.add(resp["invocation_id"]) + ids.add(resp["conversation_id"]) assert len(ids) == 5 -def test_invoke_accepts_custom_invocation_id(echo_client): - """If the message includes invocation_id, the server uses it.""" +def test_invoke_accepts_custom_conversation_id(echo_client): + """If the 
message includes conversation_id, the server uses it.""" custom_id = str(uuid.uuid4()) - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "invoke", "invocation_id": custom_id, "payload": {}}) + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "invoke", "conversation_id": custom_id, "payload": {}}) resp = ws.receive_json() - assert resp["invocation_id"] == custom_id + assert resp["conversation_id"] == custom_id # --------------------------------------------------------------------------- @@ -68,7 +68,7 @@ def test_invoke_accepts_custom_invocation_id(echo_client): def test_streaming_returns_chunks(streaming_client): """Streaming handler yields 3 chunks then stream_end.""" - with streaming_client.websocket_connect("/invocations/ws") as ws: + with streaming_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) chunks = [] while True: @@ -82,13 +82,13 @@ def test_streaming_returns_chunks(streaming_client): assert chunk == {"chunk": i} -def test_streaming_has_invocation_id(streaming_client): - """Streaming messages include invocation_id.""" - with streaming_client.websocket_connect("/invocations/ws") as ws: +def test_streaming_has_conversation_id(streaming_client): + """Streaming messages include conversation_id.""" + with streaming_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "invocation_id" in resp - uuid.UUID(resp["invocation_id"]) + assert "conversation_id" in resp + uuid.UUID(resp["conversation_id"]) # --------------------------------------------------------------------------- @@ -97,7 +97,7 @@ def test_streaming_has_invocation_id(streaming_client): def test_invoke_empty_payload(echo_client): """Empty payload doesn't crash the server.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with 
echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -109,7 +109,7 @@ def test_invoke_empty_payload(echo_client): def test_invoke_error_returns_error(failing_client): """Handler exception returns error message.""" - with failing_client.websocket_connect("/invocations/ws") as ws: + with failing_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -117,17 +117,17 @@ def test_invoke_error_returns_error(failing_client): assert resp["error"]["message"] == "Internal server error" -def test_invoke_error_has_invocation_id(failing_client): - """Error response still includes invocation_id.""" - with failing_client.websocket_connect("/invocations/ws") as ws: +def test_invoke_error_has_conversation_id(failing_client): + """Error response still includes conversation_id.""" + with failing_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "invocation_id" in resp + assert "conversation_id" in resp def test_error_hides_details_by_default(failing_client): """Exception message is hidden in error responses.""" - with failing_client.websocket_connect("/invocations/ws") as ws: + with failing_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "something went wrong" not in resp["error"]["message"] diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py index 42aa6c4107e4..701d8f0a5c4d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py @@ -1,14 +1,14 @@ # 
--------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for varied payloads with InvocationAgentServerHost over WebSocket.""" +"""Tests for varied payloads with ConversationAgentServerHost over WebSocket.""" import base64 from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) @@ -16,12 +16,12 @@ # Helper: echo agent with content type tracking # --------------------------------------------------------------------------- -def _make_content_type_echo_agent() -> InvocationAgentServerHost: +def _make_content_type_echo_agent() -> ConversationAgentServerHost: """Agent that echoes payload and notes the content_type field.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return { "echo": payload, "received_content_type": payload.get("content_type", "unknown"), @@ -30,12 +30,12 @@ async def handle(payload: dict, context: InvocationContext) -> dict: return app -def _make_sse_agent() -> InvocationAgentServerHost: +def _make_sse_agent() -> ConversationAgentServerHost: """Agent that returns streaming chunks.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext): + async def handle(payload: dict, context: ConversationContext): for i in range(3): yield {"event": i} @@ -51,7 +51,7 @@ def test_png_payload(): server = _make_content_type_echo_agent() client = TestClient(server) fake_png = b"\x89PNG\r\n\x1a\n" + b"\x00" * 100 - with client.websocket_connect("/invocations/ws") 
as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -70,7 +70,7 @@ def test_jpeg_payload(): server = _make_content_type_echo_agent() client = TestClient(server) fake_jpeg = b"\xff\xd8\xff\xe0" + b"\x00" * 100 - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -88,7 +88,7 @@ def test_wav_payload(): server = _make_content_type_echo_agent() client = TestClient(server) fake_wav = b"RIFF" + b"\x00" * 100 - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -105,7 +105,7 @@ def test_text_plain_payload(): """text/plain content type payload is accepted.""" server = _make_content_type_echo_agent() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -124,14 +124,14 @@ def test_text_plain_payload(): def test_params_in_payload(): """Arbitrary parameters are accessible in the handler payload.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"name": payload.get("name", "unknown")} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"name": "Alice"}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -146,7 +146,7 @@ def test_streaming_chunks(): """Streaming response sends multiple chunks.""" server = _make_sse_agent() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: 
+ with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) chunks = [] while True: @@ -166,10 +166,10 @@ def test_streaming_chunks(): def test_health_endpoint_returns_200(): """GET /readiness returns 200 with healthy status.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py index cda62064c13f..537187d831a6 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py @@ -7,20 +7,20 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) # --------------------------------------------------------------------------- -# InvocationAgentServerHost no longer accepts request_timeout +# ConversationAgentServerHost no longer accepts request_timeout # --------------------------------------------------------------------------- def test_no_request_timeout_parameter(): - """InvocationAgentServerHost no longer accepts request_timeout.""" + """ConversationAgentServerHost no longer accepts request_timeout.""" with pytest.raises(TypeError): - InvocationAgentServerHost(request_timeout=10) + ConversationAgentServerHost(request_timeout=10) # --------------------------------------------------------------------------- @@ -29,15 +29,15 @@ def test_no_request_timeout_parameter(): def test_slow_invoke_completes(): """Without timeout, 
handler runs to completion.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: await asyncio.sleep(0.1) return {"status": "done"} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py index 227f89fd74d5..0e09060d246c 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py @@ -1,41 +1,41 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for basic server route registration with InvocationAgentServerHost.""" +"""Tests for basic server route registration with ConversationAgentServerHost.""" import uuid from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) from conftest import SAMPLE_OPENAPI_SPEC # --------------------------------------------------------------------------- -# WebSocket connection /invocations/ws +# WebSocket connection /conversations/ws # --------------------------------------------------------------------------- def test_websocket_invoke_returns_result(echo_client): """Invoke via WebSocket returns a result.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"test": True}}) resp = ws.receive_json() assert resp["type"] == "result" # --------------------------------------------------------------------------- -# Invocation ID is valid UUID +# Conversation ID is valid UUID # --------------------------------------------------------------------------- -def test_invoke_returns_uuid_invocation_id(echo_client): - """Invoke returns a valid UUID invocation ID.""" - with echo_client.websocket_connect("/invocations/ws") as ws: +def test_invoke_returns_uuid_conversation_id(echo_client): + """Invoke returns a valid UUID conversation ID.""" + with echo_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - inv_id = resp["invocation_id"] + inv_id = resp["conversation_id"] parsed = uuid.UUID(inv_id) assert str(parsed) == inv_id @@ -45,8 +45,8 @@ def test_invoke_returns_uuid_invocation_id(echo_client): # 
--------------------------------------------------------------------------- def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): - """GET /invocations/docs/openapi.json returns 404 when no spec registered.""" - resp = no_spec_client.get("/invocations/docs/openapi.json") + """GET /conversations/docs/openapi.json returns 404 when no spec registered.""" + resp = no_spec_client.get("/conversations/docs/openapi.json") assert resp.status_code == 404 @@ -55,27 +55,27 @@ def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): # --------------------------------------------------------------------------- def test_get_openapi_spec_returns_spec_when_registered(): - """GET /invocations/docs/openapi.json returns the spec when registered.""" - app = InvocationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) + """GET /conversations/docs/openapi.json returns the spec when registered.""" + app = ConversationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - resp = client.get("/invocations/docs/openapi.json") + resp = client.get("/conversations/docs/openapi.json") assert resp.status_code == 200 assert resp.json() == SAMPLE_OPENAPI_SPEC # --------------------------------------------------------------------------- -# get_invocation returns not_found error by default +# get_conversation returns not_found error by default # --------------------------------------------------------------------------- -def test_get_invocation_returns_not_found_default(echo_client): - """get_invocation without handler returns not_found error.""" - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) +def test_get_conversation_returns_not_found_default(echo_client): + """get_conversation without 
handler returns not_found error.""" + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "get_conversation", "conversation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -85,10 +85,10 @@ def test_get_invocation_returns_not_found_default(echo_client): # cancel returns not_found error by default # --------------------------------------------------------------------------- -def test_cancel_invocation_returns_not_found_default(echo_client): - """cancel_invocation without handler returns not_found error.""" - with echo_client.websocket_connect("/invocations/ws") as ws: - ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) +def test_cancel_conversation_returns_not_found_default(echo_client): + """cancel_conversation without handler returns not_found error.""" + with echo_client.websocket_connect("/conversations/ws") as ws: + ws.send_json({"action": "cancel_conversation", "conversation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py index f56dba92eadd..97977621f055 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py @@ -8,9 +8,9 @@ from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) @@ -20,7 +20,7 @@ def test_invoke_has_session_id(echo_client): """Invoke response includes session_id.""" - with echo_client.websocket_connect("/invocations/ws") as ws: + with echo_client.websocket_connect("/conversations/ws") as ws: 
ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "session_id" in resp @@ -33,14 +33,14 @@ def test_invoke_has_session_id(echo_client): def test_invoke_with_session_id_in_message(): """Invoke with session_id in message uses that value.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({ "action": "invoke", "session_id": "my-custom-session", @@ -56,49 +56,49 @@ async def handle(payload: dict, context: InvocationContext) -> dict: def test_invoke_uses_env_var(): """Invoke uses FOUNDRY_AGENT_SESSION_ID env var when no session_id in message.""" - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) with patch.dict(os.environ, {"FOUNDRY_AGENT_SESSION_ID": "env-session"}): - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["session_id"] == "env-session" # --------------------------------------------------------------------------- -# get_invocation does NOT include session_id (not part of get protocol) +# get_conversation does NOT include session_id (not part of get protocol) # --------------------------------------------------------------------------- -def test_get_invocation_no_session_id(async_storage_client): - """get_invocation response does not include session_id.""" - with 
async_storage_client.websocket_connect("/invocations/ws") as ws: +def test_get_conversation_no_session_id(async_storage_client): + """get_conversation response does not include session_id.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert "session_id" not in get_resp # --------------------------------------------------------------------------- -# cancel_invocation does NOT include session_id +# cancel_conversation does NOT include session_id # --------------------------------------------------------------------------- -def test_cancel_invocation_no_session_id(async_storage_client): - """cancel_invocation response does not include session_id.""" - with async_storage_client.websocket_connect("/invocations/ws") as ws: +def test_cancel_conversation_no_session_id(async_storage_client): + """cancel_conversation response does not include session_id.""" + with async_storage_client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" assert "session_id" not in cancel_resp diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py index 04c9babd60a4..ef2c7ee5d10a 100644 --- 
a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py @@ -10,9 +10,9 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) @@ -51,11 +51,11 @@ def _make_server_with_child_span(): """Server whose handler creates a child span (simulating a framework).""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() child_tracer = trace.get_tracer("test.framework") @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: with child_tracer.start_as_current_span("framework_invoke_agent") as _span: return {"ok": True} @@ -66,11 +66,11 @@ def _make_streaming_server_with_child_span(): """Server with streaming response whose handler creates a child span.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() child_tracer = trace.get_tracer("test.framework") @app.invoke_handler - async def handle(payload: dict, context: InvocationContext): + async def handle(payload: dict, context: ConversationContext): with child_tracer.start_as_current_span("framework_invoke_agent"): yield {"chunk": "data"} @@ -102,7 +102,7 @@ def test_framework_span_is_child_of_invoke_span(): agentserver invoke_agent span, not a sibling.""" server = _make_server_with_child_span() client = 
TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -114,7 +114,7 @@ def test_framework_span_is_child_streaming(): """Same parent-child relationship holds for streaming responses.""" server = _make_streaming_server_with_child_span() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) while True: resp = ws.receive_json() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py index 85b7555e5ae0..a23ed4716b6e 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py @@ -1,7 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for OpenTelemetry tracing in the WebSocket invocations protocol.""" +"""Tests for OpenTelemetry tracing in the WebSocket conversations protocol.""" import os import uuid from unittest.mock import patch @@ -9,10 +9,10 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, - InvocationError, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, + ConversationError, ) @@ -68,13 +68,13 @@ def _get_spans(): # --------------------------------------------------------------------------- def _make_tracing_server(**kwargs): - """Create an InvocationAgentServerHost with tracing enabled.""" + """Create an ConversationAgentServerHost with tracing enabled.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = InvocationAgentServerHost(**kwargs) + server = ConversationAgentServerHost(**kwargs) @server.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"echo": payload} return server @@ -84,27 +84,27 @@ def _make_tracing_server_with_get_cancel(**kwargs): """Create a tracing-enabled server with get/cancel handlers.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = InvocationAgentServerHost(**kwargs) + server = ConversationAgentServerHost(**kwargs) store: dict[str, dict] = {} @server.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: - store[context.invocation_id] = payload + async def handle(payload: dict, context: 
ConversationContext) -> dict: + store[context.conversation_id] = payload return {"stored": True} - @server.get_invocation_handler - async def get_handler(context: InvocationContext) -> dict: - if context.invocation_id in store: - return {"data": store[context.invocation_id]} - raise InvocationError("not_found", "Not found") + @server.get_conversation_handler + async def get_handler(context: ConversationContext) -> dict: + if context.conversation_id in store: + return {"data": store[context.conversation_id]} + raise ConversationError("not_found", "Not found") - @server.cancel_invocation_handler - async def cancel_handler(context: InvocationContext) -> dict: - if context.invocation_id in store: - del store[context.invocation_id] + @server.cancel_conversation_handler + async def cancel_handler(context: ConversationContext) -> dict: + if context.conversation_id in store: + del store[context.conversation_id] return {"status": "cancelled"} - raise InvocationError("not_found", "Not found") + raise ConversationError("not_found", "Not found") return server @@ -113,10 +113,10 @@ def _make_failing_tracing_server(**kwargs): """Create a tracing-enabled server whose handler raises.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = InvocationAgentServerHost(**kwargs) + server = ConversationAgentServerHost(**kwargs) @server.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: raise ValueError("tracing error test") return server @@ -126,10 +126,10 @@ def _make_streaming_tracing_server(**kwargs): """Create a tracing-enabled server with streaming response.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with 
patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = InvocationAgentServerHost(**kwargs) + server = ConversationAgentServerHost(**kwargs) @server.invoke_handler - async def handle(payload: dict, context: InvocationContext): + async def handle(payload: dict, context: ConversationContext): yield {"chunk": 1} yield {"chunk": 2} @@ -145,14 +145,14 @@ def test_tracing_disabled_by_default(): if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -169,7 +169,7 @@ def test_tracing_enabled_creates_invoke_span(): """Tracing enabled creates a span named 'invoke_agent'.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -187,7 +187,7 @@ def test_invoke_error_records_exception(): """When handler raises, the span records the exception.""" server = _make_failing_tracing_server() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -203,37 +203,37 @@ def test_invoke_error_records_exception(): # GET/cancel create spans # --------------------------------------------------------------------------- -def test_get_invocation_creates_span(): - """get_invocation creates a span.""" +def 
test_get_conversation_creates_span(): + """get_conversation creates a span.""" server = _make_tracing_server_with_get_cancel() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) ws.receive_json() spans = _get_spans() - get_spans = [s for s in spans if "get_invocation" in s.name] + get_spans = [s for s in spans if "get_conversation" in s.name] assert len(get_spans) >= 1 -def test_cancel_invocation_creates_span(): - """cancel_invocation creates a span.""" +def test_cancel_conversation_creates_span(): + """cancel_conversation creates a span.""" server = _make_tracing_server_with_get_cancel() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["invocation_id"] + inv_id = invoke_resp["conversation_id"] - ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) ws.receive_json() spans = _get_spans() - cancel_spans = [s for s in spans if "cancel_invocation" in s.name] + cancel_spans = [s for s in spans if "cancel_conversation" in s.name] assert len(cancel_spans) >= 1 @@ -245,14 +245,14 @@ def test_tracing_via_appinsights_env_var(): """Tracing is enabled when APPLICATIONINSIGHTS_CONNECTION_STRING is set.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with 
patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -271,17 +271,17 @@ def test_no_tracing_when_no_endpoints(): env.pop("APPLICATIONINSIGHTS_CONNECTION_STRING", None) env.pop("OTEL_EXPORTER_OTLP_ENDPOINT", None) with patch.dict(os.environ, env, clear=True): - app = InvocationAgentServerHost() + app = ConversationAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"ok": True} if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -298,7 +298,7 @@ def test_streaming_creates_span(): """Streaming response creates and completes a span.""" server = _make_streaming_tracing_server() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) # Consume all streaming messages while True: @@ -319,7 +319,7 @@ def test_genai_attributes_on_invoke_span(): """Invoke span has GenAI semantic convention attributes.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": 
"invoke", "payload": {}}) ws.receive_json() @@ -341,7 +341,7 @@ def test_session_id_in_conversation_id(): """Session ID is set as gen_ai.conversation.id on invoke span.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({ "action": "invoke", "session_id": "test-session", diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py index 0cffdc2b1b55..f6c9299f2dd3 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py @@ -4,17 +4,17 @@ """Tests for WebSocket ping/pong keep-alive.""" from starlette.testclient import TestClient -from azure.ai.agentserver.invocations import ( - InvocationAgentServerHost, - InvocationContext, +from azure.ai.agentserver.conversations import ( + ConversationAgentServerHost, + ConversationContext, ) def _make_echo_app(**kwargs): - app = InvocationAgentServerHost(**kwargs) + app = ConversationAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: InvocationContext) -> dict: + async def handle(payload: dict, context: ConversationContext) -> dict: return {"echo": payload} return app @@ -27,16 +27,16 @@ async def handle(payload: dict, context: InvocationContext) -> dict: def test_client_ping_gets_pong(): """Server replies with pong when client sends a ping action.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "ping"}) resp = ws.receive_json() assert resp["type"] == "pong" def test_client_ping_does_not_interrupt_invoke(): - """A ping/pong exchange between invocations doesn't break the connection.""" + """A ping/pong exchange 
between conversations doesn't break the connection.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: # Normal invoke ws.send_json({"action": "invoke", "payload": {"n": 1}}) r1 = ws.receive_json() @@ -57,7 +57,7 @@ def test_client_ping_does_not_interrupt_invoke(): def test_client_pong_is_accepted_silently(): """Server accepts pong action without returning an error.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "pong"}) # No response expected for pong — verify next invoke still works. ws.send_json({"action": "invoke", "payload": {"ok": True}}) @@ -73,7 +73,7 @@ def test_ping_disabled_with_zero_interval(): """Setting ws_ping_interval=0 disables the background ping task.""" app = _make_echo_app(ws_ping_interval=0) client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -83,7 +83,7 @@ def test_custom_ping_interval(): """A custom ws_ping_interval is accepted without error.""" app = _make_echo_app(ws_ping_interval=15) client = TestClient(app) - with client.websocket_connect("/invocations/ws") as ws: + with client.websocket_connect("/conversations/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" From 73586874bd31f9f38dda85a86de52a034d706daf Mon Sep 17 00:00:00 2001 From: Xinran Date: Mon, 20 Apr 2026 10:09:25 +0000 Subject: [PATCH 03/10] Update echo agent to support HTTP invocations and adjust documentation accordingly --- .../samples/browser_client/index.html | 8 +- .../samples/streaming_echo_agent/README.md | 14 +-- .../samples/streaming_echo_agent/main.py | 97 
+++++++++++++------ .../streaming_echo_agent/requirements.txt | 1 + 4 files changed, 79 insertions(+), 41 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html index b20a542347aa..4ab56b508da4 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html @@ -49,7 +49,7 @@

Echo Agent Client

- + Disconnected @@ -91,8 +91,8 @@

Echo Agent Client

cleanup(); if (getMode() === "http") { - urlInput.value = "http://localhost:8088/conversations"; - urlInput.placeholder = "http://host:port/conversations"; + urlInput.value = "http://localhost:8088/invocations"; + urlInput.placeholder = "http://host:port/invocations"; connectBtn.style.display = "none"; disconnectBtn.style.display = "none"; setStatus("connected"); @@ -100,7 +100,7 @@

Echo Agent Client

addMessage("Switched to HTTP (SSE) mode", "system"); } else { urlInput.value = "ws://localhost:8088/conversations/ws"; - urlInput.placeholder = "ws://host:port/conversations/ws"; + urlInput.placeholder = "ws://host:port/conversations/ws"; connectBtn.style.display = ""; disconnectBtn.style.display = ""; setStatus("disconnected"); diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md index be09ab4294f5..11426e8cfecd 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md @@ -1,18 +1,18 @@ **IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. -# Echo Agent — Conversations Protocol (WebSocket + HTTP SSE Streaming) +# Echo Agent — Conversations (WebSocket) + Invocations (HTTP SSE) Streaming -This sample demonstrates a minimal echo agent built with [azure-ai-agentserver-conversations](https://pypi.org/project/azure-ai-agentserver-conversations/) that streams responses word-by-word. It supports **two communication modes**: +This sample demonstrates a minimal echo agent that combines [azure-ai-agentserver-conversations](https://pypi.org/project/azure-ai-agentserver-conversations/) (WebSocket) and [azure-ai-agentserver-invocations](https://pypi.org/project/azure-ai-agentserver-invocations/) (HTTP SSE) on a single server, streaming responses word-by-word. 
It supports **two communication modes**: - **WebSocket** — persistent connection at `ws://localhost:8088/conversations/ws` -- **HTTP SSE** — stateless POST at `http://localhost:8088/conversations` +- **HTTP SSE** — stateless POST at `http://localhost:8088/invocations` ## How It Works The agent receives user input and echoes it back with a `🔊 Echo:` prefix. Each word is streamed as a separate token chunk. -- **WebSocket mode**: tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. -- **HTTP SSE mode**: tokens are sent as `data:` lines per the Server-Sent Events spec, followed by an `event: done` signal. +- **WebSocket mode** (Conversations protocol): tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. +- **HTTP SSE mode** (Invocations protocol): tokens are sent as `data:` lines per the Server-Sent Events spec, followed by an `event: done` signal. ## Running Locally @@ -62,12 +62,12 @@ async def main(): asyncio.run(main()) ``` -### Test with HTTP SSE +### Test with HTTP SSE (Invocations) Using `curl`: ```bash -curl -N -X POST http://localhost:8088/conversations \ +curl -N -X POST http://localhost:8088/invocations \ -H "Content-Type: application/json" \ -d '{"message": "Hello world!"}' ``` diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py index df596a07c36e..cd6f49c310e1 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py @@ -1,12 +1,12 @@ # Copyright (c) Microsoft. All rights reserved. -"""Streaming echo agent supporting both WebSocket and HTTP (SSE) conversations. +"""Streaming echo agent supporting both WebSocket and HTTP (SSE) invocations. Echoes user input back as a stream, sending each word as a separate token chunk. 
Supports two communication modes: - **WebSocket** at ``ws://localhost:8088/conversations/ws`` -- **HTTP SSE** at ``POST http://localhost:8088/conversations`` +- **HTTP SSE** at ``POST http://localhost:8088/invocations`` **Server** (this file):: @@ -37,7 +37,7 @@ async def main(): **HTTP SSE client** (using ``curl``):: - curl -N -X POST http://localhost:8088/conversations \\ + curl -N -X POST http://localhost:8088/invocations \\ -H "Content-Type: application/json" \\ -d '{"message": "Hello world!"}' """ @@ -46,20 +46,15 @@ async def main(): import json from collections.abc import AsyncGenerator +from starlette.middleware.cors import CORSMiddleware from starlette.requests import Request from starlette.responses import Response, StreamingResponse -from starlette.routing import Route from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext +from azure.ai.agentserver.invocations import InvocationAgentServerHost ECHO_PREFIX = "🔊 Echo: " -_CORS_HEADERS = { - "Access-Control-Allow-Origin": "*", - "Access-Control-Allow-Methods": "POST, OPTIONS", - "Access-Control-Allow-Headers": "Content-Type", -} - async def echo_tokens(message: str) -> AsyncGenerator[dict, None]: """Yield token dicts with simulated latency (shared by both protocols). @@ -74,46 +69,88 @@ async def echo_tokens(message: str) -> AsyncGenerator[dict, None]: await asyncio.sleep(0.1) # simulate token-by-token latency +# ---------------------------------------------------------------------------InvocationAgentServerHost +# Combined host — Conversations (WebSocket) + Invocations (HTTP/SSE) # --------------------------------------------------------------------------- -# HTTP SSE conversation endpoint + + +class EchoAgentHost(ConversationAgentServerHost, InvocationAgentServerHost): + """Combined host supporting both Invocations (HTTP/SSE) and Conversations (WebSocket). 
+ + Both parent classes store their handler in ``_invoke_fn`` with incompatible + signatures, so we keep a separate ``_http_invoke_fn`` for the invocations + endpoint and override ``_dispatch_invoke`` to use it. + """ + + def __init__(self, **kwargs: object) -> None: + self._http_invoke_fn = None + super().__init__(**kwargs) + + def http_invoke_handler(self, fn): # type: ignore[override] + """Register the HTTP invocation handler.""" + self._http_invoke_fn = fn + return fn + + async def _dispatch_invoke(self, request: Request) -> Response: + """Route HTTP invocations to the dedicated HTTP handler.""" + if self._http_invoke_fn is not None: + return await self._http_invoke_fn(request) + raise NotImplementedError( + "No HTTP invoke handler registered. Use @app.http_invoke_handler." + ) + + +app = EchoAgentHost() +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_methods=["*"], + allow_headers=["*"], +) + + # --------------------------------------------------------------------------- +# HTTP SSE invocation endpoint (via azure-ai-agentserver-invocations) +# --------------------------------------------------------------------------- + +async def _generate_sse(message: str, invocation_id: str) -> AsyncGenerator[bytes, None]: + """Yield SSE-formatted token events with simulated latency. + :param message: The user message to echo. + :type message: str + :param invocation_id: The invocation ID for this request. + :type invocation_id: str + """ + async for chunk in echo_tokens(message): + payload = json.dumps(chunk) + yield f"data: {payload}\n\n".encode() + done_payload = json.dumps({"invocation_id": invocation_id}) + yield f"event: done\ndata: {done_payload}\n\n".encode() + + +@app.http_invoke_handler async def handle_http_invoke(request: Request) -> Response: - """HTTP conversation endpoint — streams tokens as Server-Sent Events. + """HTTP invocation endpoint — streams tokens as Server-Sent Events. :param request: The incoming HTTP request. 
:type request: ~starlette.requests.Request """ - if request.method == "OPTIONS": - return Response(status_code=204, headers=_CORS_HEADERS) - data = await request.json() + invocation_id = request.state.invocation_id message = data.get("message", "Hello! Send me a message and I'll echo it back.") - async def generate_sse() -> AsyncGenerator[bytes, None]: - async for chunk in echo_tokens(message): - payload = json.dumps(chunk) - yield f"data: {payload}\n\n".encode() - yield b"event: done\ndata: {}\n\n" - return StreamingResponse( - generate_sse(), + _generate_sse(message, invocation_id), media_type="text/event-stream", - headers={**_CORS_HEADERS, "Cache-Control": "no-cache", "Connection": "keep-alive"}, + headers={"Cache-Control": "no-cache", "Connection": "keep-alive"}, ) # --------------------------------------------------------------------------- -# Application — WebSocket + HTTP routes +# WebSocket conversation endpoint (via azure-ai-agentserver-websocket) # --------------------------------------------------------------------------- -app = ConversationAgentServerHost( - routes=[ - Route("/conversations", handle_http_invoke, methods=["POST", "OPTIONS"]), - ], -) - @app.invoke_handler async def handle_invoke( diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt index 2db7962ad24d..0aa70e3fbe53 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt @@ -1,2 +1,3 @@ ../../ +../../../azure-ai-agentserver-invocations websockets \ No newline at end of file From 8fb7173f5c41822c3f66272532adac0446f4959b Mon Sep 17 00:00:00 2001 From: Xinran Date: Tue, 21 Apr 2026 03:14:54 +0000 Subject: [PATCH 04/10] rename conversation to websocket --- .../CHANGELOG.md | 12 +- .../MANIFEST.in | 2 
+- .../azure-ai-agentserver-websocket/README.md | 136 ++++----- .../{conversations => websocket}/__init__.py | 12 +- .../_constants.py | 18 +- .../{conversations => websocket}/_version.py | 0 .../_websocket.py} | 282 +++++++++--------- .../{conversations => websocket}/py.typed | 0 .../cspell.json | 2 +- .../pyproject.toml | 10 +- .../samples/browser_client/index.html | 6 +- .../samples/streaming_echo_agent/README.md | 10 +- .../streaming_echo_agent/agent.manifest.yaml | 10 +- .../samples/streaming_echo_agent/agent.yaml | 2 +- .../samples/streaming_echo_agent/main.py | 20 +- .../tests/conftest.py | 82 ++--- .../tests/test_decorator_pattern.py | 124 ++++---- .../tests/test_edge_cases.py | 122 ++++---- .../tests/test_get_cancel.py | 80 ++--- .../tests/test_graceful_shutdown.py | 42 +-- .../tests/test_invoke.py | 60 ++-- .../tests/test_multimodal_protocol.py | 40 +-- .../tests/test_request_limits.py | 18 +- .../tests/test_server_routes.py | 52 ++-- .../tests/test_session_id.py | 44 +-- .../tests/test_span_parenting.py | 18 +- .../tests/test_tracing.py | 104 +++---- .../tests/test_ws_keepalive.py | 22 +- 28 files changed, 665 insertions(+), 665 deletions(-) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{conversations => websocket}/__init__.py (55%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{conversations => websocket}/_constants.py (54%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{conversations => websocket}/_version.py (100%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{conversations/_conversation.py => websocket/_websocket.py} (66%) rename sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/{conversations => websocket}/py.typed (100%) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md index a520e75a9d7d..11cc5ef64e21 100644 --- 
a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md @@ -4,12 +4,12 @@ ### Features Added -- Initial release of `azure-ai-agentserver-conversations`. -- `ConversationHandler` for wiring conversation protocol endpoints to an `AgentHost`. -- Decorator-based handler registration (`@conversations.invoke_handler`). -- Optional `GET /conversations/{id}` and `POST /conversations/{id}/cancel` endpoints. -- `GET /conversations/docs/openapi.json` for OpenAPI spec serving. -- Conversation ID tracking and session correlation via `agent_session_id` query parameter. +- Initial release of `azure-ai-agentserver-websocket`. +- `WebsocketHandler` for wiring websocket protocol endpoints to an `AgentHost`. +- Decorator-based handler registration (`@websocket.invoke_handler`). +- Optional `GET /websocket/{id}` and `POST /websocket/{id}/cancel` endpoints. +- `GET /websocket/docs/openapi.json` for OpenAPI spec serving. +- Websocket ID tracking and session correlation via `agent_session_id` query parameter. - Distributed tracing with GenAI semantic convention span attributes. - W3C Baggage propagation for cross-service correlation. - Streaming response support with span lifecycle management. 
diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in index dd7cd63de8c3..15f469d979fd 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in +++ b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in @@ -5,4 +5,4 @@ recursive-include samples *.py *.md include azure/__init__.py include azure/ai/__init__.py include azure/ai/agentserver/__init__.py -include azure/ai/agentserver/conversations/py.typed +include azure/ai/agentserver/websocket/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/README.md index 049ea93b92a2..1cb92c53d201 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/README.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/README.md @@ -1,13 +1,13 @@ -# Azure AI AgentServerHost Conversations for Python (WebSocket) +# Azure AI AgentServerHost Websocket for Python (WebSocket) -The `azure-ai-agentserver-conversations` package provides the conversation protocol over **WebSocket long connections** for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and exposes a single WebSocket endpoint at `/conversations/ws` that supports invoke, get, cancel, and streaming operations. +The `azure-ai-agentserver-websocket` package provides the websocket protocol over **WebSocket long connections** for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and exposes a single WebSocket endpoint at `/websocket/ws` that supports invoke, get, cancel, and streaming operations. ## Getting started ### Install the package ```bash -pip install azure-ai-agentserver-conversations +pip install azure-ai-agentserver-websocket ``` This automatically installs `azure-ai-agentserver-core` as a dependency. 
@@ -18,33 +18,33 @@ This automatically installs `azure-ai-agentserver-core` as a dependency. ## Key concepts -### ConversationAgentServerHost +### WebsocketAgentServerHost -`ConversationAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the conversation protocol. It provides decorator methods for registering handler functions: +`WebsocketAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the websocket protocol. It provides decorator methods for registering handler functions: - `@app.invoke_handler` — **Required.** Handles `invoke` actions. Supports both async functions (non-streaming) and async generators (streaming). -- `@app.get_conversation_handler` — Optional. Handles `get_conversation` actions. -- `@app.cancel_conversation_handler` — Optional. Handles `cancel_conversation` actions. +- `@app.get_websocket_handler` — Optional. Handles `get_websocket` actions. +- `@app.cancel_websocket_handler` — Optional. Handles `cancel_websocket` actions. -### ConversationContext +### WebsocketContext -Handler functions receive an `ConversationContext` object containing: +Handler functions receive an `WebsocketContext` object containing: -- `context.conversation_id` — The conversation ID (echoed from client or auto-generated UUID). +- `context.websocket_id` — The websocket ID (echoed from client or auto-generated UUID). - `context.session_id` — The resolved session ID. -### ConversationError +### WebsocketError -Handlers can raise `ConversationError(code, message)` to return a domain-specific error to the client without exposing internal details. +Handlers can raise `WebsocketError(code, message)` to return a domain-specific error to the client without exposing internal details. 
### WebSocket endpoint -All conversation operations use a single persistent WebSocket connection: +All websocket operations use a single persistent WebSocket connection: | Route | Description | |---|---| -| `ws://host:port/conversations/ws` | WebSocket endpoint for all conversation operations | -| `GET /conversations/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | +| `ws://host:port/websocket/ws` | WebSocket endpoint for all websocket operations | +| `GET /websocket/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | | `GET /readiness` | Health check (HTTP) | ### Client → Server messages @@ -52,9 +52,9 @@ All conversation operations use a single persistent WebSocket connection: All messages are JSON text frames with an `action` field: ```json -{"action": "invoke", "payload": {...}, "conversation_id": "optional", "session_id": "optional"} -{"action": "get_conversation", "conversation_id": "required"} -{"action": "cancel_conversation", "conversation_id": "required"} +{"action": "invoke", "payload": {...}, "websocket_id": "optional", "session_id": "optional"} +{"action": "get_websocket", "websocket_id": "required"} +{"action": "cancel_websocket", "websocket_id": "required"} {"action": "ping"} {"action": "pong"} ``` @@ -62,10 +62,10 @@ All messages are JSON text frames with an `action` field: ### Server → Client messages ```json -{"type": "result", "conversation_id": "...", "session_id": "...", "payload": {...}} -{"type": "stream_chunk", "conversation_id": "...", "session_id": "...", "payload": {...}} -{"type": "stream_end", "conversation_id": "...", "session_id": "..."} -{"type": "error", "conversation_id": "...", "error": {"code": "...", "message": "..."}} +{"type": "result", "websocket_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_chunk", "websocket_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_end", "websocket_id": "...", "session_id": "..."} +{"type": "error", "websocket_id": "...", 
"error": {"code": "...", "message": "..."}} {"type": "ping"} {"type": "pong"} ``` @@ -75,18 +75,18 @@ All messages are JSON text frames with an `action` field: Azure APIM and Azure Load Balancer silently drop idle WebSocket connections after approximately 4 minutes, even though the backend supports 60-minute connections. To prevent this, the server sends periodic `{"type": "ping"}` messages to each connected client. - **Default interval**: 30 seconds (well within the ~4-minute idle timeout). -- **Disable**: Pass `ws_ping_interval=0` to `ConversationAgentServerHost()`. +- **Disable**: Pass `ws_ping_interval=0` to `WebsocketAgentServerHost()`. - **Custom interval**: Pass any positive integer, e.g. `ws_ping_interval=15`. Clients should respond with `{"action": "pong"}` when they receive a `{"type": "ping"}` message. Clients may also send `{"action": "ping"}` at any time; the server replies with `{"type": "pong"}`. ```python -app = ConversationAgentServerHost(ws_ping_interval=20) # ping every 20 seconds +app = WebsocketAgentServerHost(ws_ping_interval=20) # ping every 20 seconds ``` ### Session ID resolution -Session IDs group related conversations into a session. The SDK resolves the session ID in order: +Session IDs group related websocket sessions. The SDK resolves the session ID in order: 1. `session_id` field in the WebSocket message 2. `FOUNDRY_AGENT_SESSION_ID` environment variable @@ -94,24 +94,24 @@ Session IDs group related conversations into a session. 
The SDK resolves the ses ### Distributed tracing -When tracing is enabled on the `AgentServerHost`, conversation spans are automatically created with GenAI semantic conventions: +When tracing is enabled on the `AgentServerHost`, websocket spans are automatically created with GenAI semantic conventions: - **Span name**: `invoke_agent {FOUNDRY_AGENT_NAME}:{FOUNDRY_AGENT_VERSION}` - **Span attributes**: `gen_ai.system`, `gen_ai.operation.name`, `gen_ai.response.id`, `gen_ai.conversation.id`, `gen_ai.agent.id`, `gen_ai.agent.name`, `gen_ai.agent.version` -- **Error tags**: `azure.ai.agentserver.conversations.error.code`, `.error.message` +- **Error tags**: `azure.ai.agentserver.websocket.error.code`, `.error.message` ## Examples ### Simple agent ```python -from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext +from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext -app = ConversationAgentServerHost() +app = WebsocketAgentServerHost() @app.invoke_handler -async def handle(payload: dict, context: ConversationContext) -> dict: +async def handle(payload: dict, context: WebsocketContext) -> dict: return {"greeting": f"Hello, {payload['name']}!"} app.run() @@ -123,7 +123,7 @@ app.run() import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: + async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"name": "Alice"} @@ -144,41 +144,41 @@ asyncio.run(main()) ```python import asyncio -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, - ConversationError, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, + WebsocketError, ) _tasks: dict[str, asyncio.Task] = {} _results: dict[str, dict] = {} -app = ConversationAgentServerHost() +app = WebsocketAgentServerHost() 
@app.invoke_handler -async def handle(payload: dict, context: ConversationContext) -> dict: - task = asyncio.create_task(do_work(context.conversation_id, payload)) - _tasks[context.conversation_id] = task - return {"conversation_id": context.conversation_id, "status": "running"} - - -@app.get_conversation_handler -async def get_conversation(context: ConversationContext) -> dict: - if context.conversation_id in _results: - return _results[context.conversation_id] - if context.conversation_id in _tasks: - return {"conversation_id": context.conversation_id, "status": "running"} - raise ConversationError("not_found", "Conversation not found") - - -@app.cancel_conversation_handler -async def cancel_conversation(context: ConversationContext) -> dict: - if context.conversation_id in _tasks: - _tasks[context.conversation_id].cancel() - del _tasks[context.conversation_id] - return {"conversation_id": context.conversation_id, "status": "cancelled"} - raise ConversationError("not_found", "Conversation not found") +async def handle(payload: dict, context: WebsocketContext) -> dict: + task = asyncio.create_task(do_work(context.websocket_id, payload)) + _tasks[context.websocket_id] = task + return {"websocket_id": context.websocket_id, "status": "running"} + + +@app.get_websocket_handler +async def get_websocket(context: WebsocketContext) -> dict: + if context.websocket_id in _results: + return _results[context.websocket_id] + if context.websocket_id in _tasks: + return {"websocket_id": context.websocket_id, "status": "running"} + raise WebsocketError("not_found", "Websocket not found") + + +@app.cancel_websocket_handler +async def cancel_websocket(context: WebsocketContext) -> dict: + if context.websocket_id in _tasks: + _tasks[context.websocket_id].cancel() + del _tasks[context.websocket_id] + return {"websocket_id": context.websocket_id, "status": "cancelled"} + raise WebsocketError("not_found", "Websocket not found") ``` ### Streaming @@ -186,13 +186,13 @@ async def 
cancel_conversation(context: ConversationContext) -> dict: Use an async generator to stream chunks back to the client. Each yielded dict is sent as a `stream_chunk` message, followed by a `stream_end` when the generator completes. ```python -from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext +from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext -app = ConversationAgentServerHost() +app = WebsocketAgentServerHost() @app.invoke_handler -async def handle(payload: dict, context: ConversationContext): +async def handle(payload: dict, context: WebsocketContext): for word in ["Hello", " ", "world", "!"]: yield {"delta": word} ``` @@ -203,7 +203,7 @@ async def handle(payload: dict, context: ConversationContext): import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: + async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: await ws.send(json.dumps({"action": "invoke", "payload": {}})) while True: msg = json.loads(await ws.recv()) @@ -220,13 +220,13 @@ asyncio.run(main()) ### Multi-turn conversation -Use the `session_id` field to group conversations into a session over the same WebSocket connection: +Use the `session_id` field to group websocket sessions over the same WebSocket connection: ```python import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: + async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: # First turn await ws.send(json.dumps({ "action": "invoke", @@ -248,10 +248,10 @@ asyncio.run(main()) ### Serving an OpenAPI spec -Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /conversations/docs/openapi.json`: +Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /websocket/docs/openapi.json`: ```python -app = ConversationAgentServerHost(openapi_spec={ +app = 
WebsocketAgentServerHost(openapi_spec={ "openapi": "3.0.3", "info": {"title": "My Agent", "version": "1.0.0"}, "paths": { ... }, @@ -266,11 +266,11 @@ To report an issue with the client library, or request additional features, plea ## Next steps -Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-conversations/samples) folder for complete working examples: +Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-websocket/samples) folder for complete working examples: | Sample | Description | |---|---| -| [streaming_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-conversations/samples/streaming_invoke_agent/) | Streaming code-generation tokens via WebSocket | +| [streaming_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/) | Streaming code-generation tokens via WebSocket | ## Contributing diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py similarity index 55% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/__init__.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py index 60cb31771912..5712cd812f93 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py @@ -1,17 +1,17 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Conversations protocol for Azure AI Hosted Agents. 
+"""Websocket protocol for Azure AI Hosted Agents. -This package provides an conversation protocol host as a subclass of +This package provides an websocket protocol host as a subclass of :class:`~azure.ai.agentserver.core.AgentServerHost`. Quick start:: - from azure.ai.agentserver.conversations import ConversationAgentServerHost + from azure.ai.agentserver.websocket import WebsocketAgentServerHost from starlette.responses import JSONResponse - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler async def handle(request): @@ -21,8 +21,8 @@ async def handle(request): """ __path__ = __import__("pkgutil").extend_path(__path__, __name__) -from ._conversation import ConversationAgentServerHost, ConversationContext, ConversationError +from ._websocket import WebsocketAgentServerHost, WebsocketContext, WebsocketError from ._version import VERSION -__all__ = ["ConversationAgentServerHost", "ConversationContext", "ConversationError"] +__all__ = ["WebsocketAgentServerHost", "WebsocketContext", "WebsocketError"] __version__ = VERSION diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_constants.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py similarity index 54% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_constants.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py index 1e25f644417d..699427ccd507 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_constants.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py @@ -3,10 +3,10 @@ # --------------------------------------------------------- -class ConversationConstants: - """Conversation protocol constants. +class WebsocketConstants: + """Websocket protocol constants. 
- Protocol-specific constants for the WebSocket conversation protocol. + Protocol-specific constants for the WebSocket websocket protocol. """ # WebSocket message types (server → client) @@ -19,8 +19,8 @@ class ConversationConstants: # WebSocket actions (client → server) ACTION_INVOKE = "invoke" - ACTION_GET_CONVERSATION = "get_conversation" - ACTION_CANCEL_CONVERSATION = "cancel_conversation" + ACTION_GET_WEBSOCKET = "get_websocket" + ACTION_CANCEL_WEBSOCKET = "cancel_websocket" ACTION_PING = "ping" ACTION_PONG = "pong" @@ -28,7 +28,7 @@ class ConversationConstants: DEFAULT_WS_PING_INTERVAL = 30 # seconds # Span attribute keys - ATTR_SPAN_CONVERSATION_ID = "azure.ai.agentserver.conversations.conversation_id" - ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.conversations.session_id" - ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.conversations.error.code" - ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.conversations.error.message" + ATTR_SPAN_WEBSOCKET_ID = "azure.ai.agentserver.websocket.websocket_id" + ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.websocket.session_id" + ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.websocket.error.code" + ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.websocket.error.message" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_version.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_version.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_version.py rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_version.py diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_conversation.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py similarity index 66% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_conversation.py 
rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py index e6300c9d3740..bab56b9a145d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/_conversation.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py @@ -1,9 +1,9 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Conversation protocol host for Azure AI Hosted Agents (WebSocket). +"""Websocket protocol host for Azure AI Hosted Agents (WebSocket). -Provides the conversation protocol over WebSocket long connections +Provides the websocket protocol over WebSocket long connections as a :class:`~azure.ai.agentserver.core.AgentServerHost` subclass. """ import asyncio @@ -30,7 +30,7 @@ record_error, ) -from ._constants import ConversationConstants +from ._constants import WebsocketConstants logger = logging.getLogger("azure.ai.agentserver") @@ -59,23 +59,23 @@ def _sanitize_id(value: str, fallback: str) -> str: @dataclass -class ConversationContext: - """Contextual information for a conversation request. +class WebsocketContext: + """Contextual information for a websocket request. Passed to handler functions registered via :meth:`invoke_handler`, - :meth:`get_conversation_handler`, and :meth:`cancel_conversation_handler`. + :meth:`get_websocket_handler`, and :meth:`cancel_websocket_handler`. - :param conversation_id: Unique identifier for this conversation. - :type conversation_id: str - :param session_id: Session identifier for this conversation. + :param websocket_id: Unique identifier for this websocket. + :type websocket_id: str + :param session_id: Session identifier for this websocket. 
:type session_id: str """ - conversation_id: str + websocket_id: str session_id: str -class ConversationError(Exception): +class WebsocketError(Exception): """Raised by handlers to signal a domain-specific error. :param code: Machine-readable error code. @@ -90,33 +90,33 @@ def __init__(self, code: str, message: str) -> None: super().__init__(message) -class ConversationAgentServerHost(AgentServerHost): - """Conversation protocol host for Azure AI Hosted Agents over WebSocket. +class WebsocketAgentServerHost(AgentServerHost): + """Websocket protocol host for Azure AI Hosted Agents over WebSocket. A :class:`~azure.ai.agentserver.core.AgentServerHost` subclass that adds - a WebSocket endpoint for the conversation protocol. Use the decorator + a WebSocket endpoint for the websocket protocol. Use the decorator methods to wire handler functions to messages. - WebSocket endpoint: ``/conversations/ws`` + WebSocket endpoint: ``/websocket/ws`` **Client → Server messages** (JSON text frames):: - {"action": "invoke", "conversation_id": "opt", "session_id": "opt", "payload": {...}} - {"action": "get_conversation", "conversation_id": "required"} - {"action": "cancel_conversation", "conversation_id": "required"} + {"action": "invoke", "websocket_id": "opt", "session_id": "opt", "payload": {...}} + {"action": "get_websocket", "websocket_id": "required"} + {"action": "cancel_websocket", "websocket_id": "required"} **Server → Client messages** (JSON text frames):: - {"type": "result", "conversation_id": "...", "session_id": "...", "payload": {...}} - {"type": "stream_chunk", "conversation_id": "...", "session_id": "...", "payload": {...}} - {"type": "stream_end", "conversation_id": "...", "session_id": "..."} - {"type": "error", "conversation_id": "...", "error": {"code": "...", "message": "..."}} + {"type": "result", "websocket_id": "...", "session_id": "...", "payload": {...}} + {"type": "stream_chunk", "websocket_id": "...", "session_id": "...", "payload": {...}} + {"type": 
"stream_end", "websocket_id": "...", "session_id": "..."} + {"type": "error", "websocket_id": "...", "error": {"code": "...", "message": "..."}} Usage:: - from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext + from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler async def handle(payload, context): @@ -125,7 +125,7 @@ async def handle(payload, context): app.run() :param openapi_spec: Optional OpenAPI spec dict. When provided, the spec - is served at ``GET /conversations/docs/openapi.json``. + is served at ``GET /websocket/docs/openapi.json``. :type openapi_spec: Optional[dict[str, Any]] :param ws_ping_interval: Interval in seconds between keep-alive ping frames sent to each connected WebSocket client. Keeps the @@ -143,33 +143,33 @@ def __init__( **kwargs: Any, ) -> None: self._invoke_fn: Optional[Callable] = None - self._get_conversation_fn: Optional[Callable] = None - self._cancel_conversation_fn: Optional[Callable] = None + self._get_websocket_fn: Optional[Callable] = None + self._cancel_websocket_fn: Optional[Callable] = None self._openapi_spec = openapi_spec self._ws_ping_interval: int = ( ws_ping_interval if ws_ping_interval is not None - else ConversationConstants.DEFAULT_WS_PING_INTERVAL + else WebsocketConstants.DEFAULT_WS_PING_INTERVAL ) - # Build conversation routes - conversation_routes: list[Any] = [ + # Build websocket routes + websocket_routes: list[Any] = [ Route( - "/conversations/docs/openapi.json", + "/websocket/docs/openapi.json", self._get_openapi_spec_endpoint, methods=["GET"], name="get_openapi_spec", ), WebSocketRoute( - "/conversations/ws", + "/websocket/ws", self._websocket_endpoint, - name="conversations_ws", + name="websocket_ws", ), ] # Merge with any routes from sibling mixins via cooperative init existing = list(kwargs.pop("routes", None) or []) - 
super().__init__(routes=existing + conversation_routes, **kwargs) + super().__init__(routes=existing + websocket_routes, **kwargs) # ------------------------------------------------------------------ # Handler decorators @@ -180,7 +180,7 @@ def invoke_handler( ) -> Callable[..., Any]: """Register a function as the invoke handler. - The handler receives ``(payload: dict, context: ConversationContext)`` + The handler receives ``(payload: dict, context: WebsocketContext)`` and may be: - An async function returning a ``dict`` (non-streaming). @@ -212,12 +212,12 @@ async def handle(payload, context): self._invoke_fn = fn return fn - def get_conversation_handler( + def get_websocket_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the get-conversation handler. + """Register a function as the get-websocket handler. - The handler receives ``(context: ConversationContext)`` and returns + The handler receives ``(context: WebsocketContext)`` and returns a ``dict``. :param fn: Async function. @@ -228,18 +228,18 @@ def get_conversation_handler( """ if not inspect.iscoroutinefunction(fn): raise TypeError( - f"get_conversation_handler expects an async function, got {type(fn).__name__}. " + f"get_websocket_handler expects an async function, got {type(fn).__name__}. " "Use 'async def' to define your handler." ) - self._get_conversation_fn = fn + self._get_websocket_fn = fn return fn - def cancel_conversation_handler( + def cancel_websocket_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the cancel-conversation handler. + """Register a function as the cancel-websocket handler. - The handler receives ``(context: ConversationContext)`` and returns + The handler receives ``(context: WebsocketContext)`` and returns a ``dict``. :param fn: Async function. 
@@ -250,10 +250,10 @@ def cancel_conversation_handler( """ if not inspect.iscoroutinefunction(fn): raise TypeError( - f"cancel_conversation_handler expects an async function, got {type(fn).__name__}. " + f"cancel_websocket_handler expects an async function, got {type(fn).__name__}. " "Use 'async def' to define your handler." ) - self._cancel_conversation_fn = fn + self._cancel_websocket_fn = fn return fn # ------------------------------------------------------------------ @@ -291,7 +291,7 @@ def _safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: def _request_span( self, headers: Any, - conversation_id: str, + websocket_id: str, span_operation: str, operation_name: Optional[str] = None, session_id: str = "", @@ -300,8 +300,8 @@ def _request_span( :param headers: HTTP/WebSocket handshake headers. :type headers: any - :param conversation_id: The request/conversation ID. - :type conversation_id: str + :param websocket_id: The request/websocket ID. + :type websocket_id: str :param span_operation: Span operation name. :type span_operation: str :param operation_name: Optional ``gen_ai.operation.name`` value. @@ -312,7 +312,7 @@ def _request_span( :rtype: any """ return self.request_span( - headers, conversation_id, span_operation, + headers, websocket_id, span_operation, operation_name=operation_name, session_id=session_id, end_on_exit=False, ) @@ -320,7 +320,7 @@ def _request_span( def _simple_request_span( self, headers: Any, - conversation_id: str, + websocket_id: str, span_operation: str, session_id: str = "", ) -> Any: @@ -330,8 +330,8 @@ def _simple_request_span( :param headers: HTTP/WebSocket handshake headers. :type headers: any - :param conversation_id: The request/conversation ID. - :type conversation_id: str + :param websocket_id: The request/websocket ID. + :type websocket_id: str :param span_operation: Span operation name. :type span_operation: str :param session_id: Session ID (empty string if absent). 
@@ -340,7 +340,7 @@ def _simple_request_span( :rtype: any """ return self.request_span( - headers, conversation_id, span_operation, + headers, websocket_id, span_operation, session_id=session_id, ) @@ -362,13 +362,13 @@ async def _ws_ping_loop(self, websocket: WebSocket) -> None: try: while True: await asyncio.sleep(self._ws_ping_interval) - await websocket.send_json({"type": ConversationConstants.MSG_TYPE_PING}) + await websocket.send_json({"type": WebsocketConstants.MSG_TYPE_PING}) except (WebSocketDisconnect, Exception): # pylint: disable=broad-exception-caught # Connection closed or errored — let the task exit silently. pass async def _websocket_endpoint(self, websocket: WebSocket) -> None: - """Main WebSocket endpoint for the conversation protocol. + """Main WebSocket endpoint for the websocket protocol. Accepts a WebSocket connection and processes JSON messages in a loop. Each message must contain an ``action`` field. @@ -394,34 +394,34 @@ async def _websocket_endpoint(self, websocket: WebSocket) -> None: message = json.loads(raw) except (json.JSONDecodeError, ValueError): await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, + "type": WebsocketConstants.MSG_TYPE_ERROR, "error": {"code": "invalid_json", "message": "Invalid JSON message"}, }) continue if not isinstance(message, dict): await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, + "type": WebsocketConstants.MSG_TYPE_ERROR, "error": {"code": "invalid_message", "message": "Message must be a JSON object"}, }) continue action = message.get("action") - if action == ConversationConstants.ACTION_INVOKE: + if action == WebsocketConstants.ACTION_INVOKE: await self._handle_ws_invoke(websocket, message) - elif action == ConversationConstants.ACTION_GET_CONVERSATION: - await self._handle_ws_get_conversation(websocket, message) - elif action == ConversationConstants.ACTION_CANCEL_CONVERSATION: - await self._handle_ws_cancel_conversation(websocket, message) - elif action 
== ConversationConstants.ACTION_PING: + elif action == WebsocketConstants.ACTION_GET_WEBSOCKET: + await self._handle_ws_get_websocket(websocket, message) + elif action == WebsocketConstants.ACTION_CANCEL_WEBSOCKET: + await self._handle_ws_cancel_websocket(websocket, message) + elif action == WebsocketConstants.ACTION_PING: # Client-initiated ping — respond with pong. - await websocket.send_json({"type": ConversationConstants.MSG_TYPE_PONG}) - elif action == ConversationConstants.ACTION_PONG: + await websocket.send_json({"type": WebsocketConstants.MSG_TYPE_PONG}) + elif action == WebsocketConstants.ACTION_PONG: # Client pong response — no-op, already kept connection alive. pass else: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, + "type": WebsocketConstants.MSG_TYPE_ERROR, "error": { "code": "invalid_action", "message": f"Unknown action: {action}", @@ -443,8 +443,8 @@ async def _websocket_endpoint(self, websocket: WebSocket) -> None: async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) -> None: generated_id = str(uuid.uuid4()) - raw_conversation_id = message.get("conversation_id") or "" - conversation_id = _sanitize_id(raw_conversation_id, generated_id) + raw_websocket_id = message.get("websocket_id") or "" + websocket_id = _sanitize_id(raw_websocket_id, generated_id) raw_session_id = ( message.get("session_id") @@ -453,13 +453,13 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) ) session_id = _sanitize_id(raw_session_id, str(uuid.uuid4())) - context = ConversationContext(conversation_id=conversation_id, session_id=session_id) + context = WebsocketContext(websocket_id=websocket_id, session_id=session_id) payload = message.get("payload", {}) if self._invoke_fn is None: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "session_id": 
session_id, "error": { "code": "not_implemented", @@ -469,12 +469,12 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) return with self._request_span( - websocket.headers, conversation_id, "invoke_agent", + websocket.headers, websocket_id, "invoke_agent", operation_name="invoke_agent", session_id=session_id, ) as otel_span: self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_CONVERSATION_ID: conversation_id, - ConversationConstants.ATTR_SPAN_SESSION_ID: session_id, + WebsocketConstants.ATTR_SPAN_WEBSOCKET_ID: websocket_id, + WebsocketConstants.ATTR_SPAN_SESSION_ID: session_id, }) try: @@ -482,49 +482,49 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) # Streaming response async for chunk in self._invoke_fn(payload, context): await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_STREAM_CHUNK, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_STREAM_CHUNK, + "websocket_id": websocket_id, "session_id": session_id, "payload": chunk, }) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_STREAM_END, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_STREAM_END, + "websocket_id": websocket_id, "session_id": session_id, }) else: # Non-streaming response result = await self._invoke_fn(payload, context) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_RESULT, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_RESULT, + "websocket_id": websocket_id, "session_id": session_id, "payload": result, }) - except ConversationError as exc: + except WebsocketError as exc: self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_ERROR_CODE: exc.code, - ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, + WebsocketConstants.ATTR_SPAN_ERROR_CODE: exc.code, + WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, }) end_span(otel_span, exc=exc) - 
logger.error("Conversation %s failed: %s", conversation_id, exc) + logger.error("Websocket %s failed: %s", websocket_id, exc) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "session_id": session_id, "error": {"code": exc.code, "message": exc.message}, }) return except Exception as exc: # pylint: disable=broad-exception-caught self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + WebsocketConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) end_span(otel_span, exc=exc) - logger.error("Error processing conversation %s: %s", conversation_id, exc, exc_info=True) + logger.error("Error processing websocket %s: %s", websocket_id, exc, exc_info=True) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "session_id": session_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) @@ -534,119 +534,119 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) end_span(otel_span) # ------------------------------------------------------------------ - # Get-conversation handler + # Get-websocket handler # ------------------------------------------------------------------ - async def _handle_ws_get_conversation(self, websocket: WebSocket, message: dict[str, Any]) -> None: - conversation_id = message.get("conversation_id") or "" - if not conversation_id: + async def _handle_ws_get_websocket(self, websocket: WebSocket, message: dict[str, Any]) -> None: + websocket_id = message.get("websocket_id") or "" + if not websocket_id: await websocket.send_json({ - "type": 
ConversationConstants.MSG_TYPE_ERROR, - "error": {"code": "invalid_request", "message": "conversation_id is required"}, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "websocket_id is required"}, }) return session_id = message.get("session_id") or "" - context = ConversationContext(conversation_id=conversation_id, session_id=session_id) + context = WebsocketContext(websocket_id=websocket_id, session_id=session_id) with self._simple_request_span( - websocket.headers, conversation_id, "get_conversation", + websocket.headers, websocket_id, "get_websocket", session_id=session_id, ) as otel_span: self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_CONVERSATION_ID: conversation_id, - ConversationConstants.ATTR_SPAN_SESSION_ID: session_id, + WebsocketConstants.ATTR_SPAN_WEBSOCKET_ID: websocket_id, + WebsocketConstants.ATTR_SPAN_SESSION_ID: session_id, }) - if self._get_conversation_fn is None: + if self._get_websocket_fn is None: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, - "error": {"code": "not_found", "message": "get_conversation not implemented"}, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, + "error": {"code": "not_found", "message": "get_websocket not implemented"}, }) return try: - result = await self._get_conversation_fn(context) + result = await self._get_websocket_fn(context) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_RESULT, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_RESULT, + "websocket_id": websocket_id, "payload": result, }) - except ConversationError as exc: + except WebsocketError as exc: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "error": {"code": exc.code, "message": exc.message}, }) 
except Exception as exc: # pylint: disable=broad-exception-caught self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + WebsocketConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) record_error(otel_span, exc) - logger.error("Error in get_conversation %s: %s", conversation_id, exc, exc_info=True) + logger.error("Error in get_websocket %s: %s", websocket_id, exc, exc_info=True) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) # ------------------------------------------------------------------ - # Cancel-conversation handler + # Cancel-websocket handler # ------------------------------------------------------------------ - async def _handle_ws_cancel_conversation(self, websocket: WebSocket, message: dict[str, Any]) -> None: - conversation_id = message.get("conversation_id") or "" - if not conversation_id: + async def _handle_ws_cancel_websocket(self, websocket: WebSocket, message: dict[str, Any]) -> None: + websocket_id = message.get("websocket_id") or "" + if not websocket_id: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "error": {"code": "invalid_request", "message": "conversation_id is required"}, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "websocket_id is required"}, }) return session_id = message.get("session_id") or "" - context = ConversationContext(conversation_id=conversation_id, session_id=session_id) + context = WebsocketContext(websocket_id=websocket_id, session_id=session_id) with self._simple_request_span( - websocket.headers, conversation_id, "cancel_conversation", + websocket.headers, 
websocket_id, "cancel_websocket", session_id=session_id, ) as otel_span: self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_CONVERSATION_ID: conversation_id, - ConversationConstants.ATTR_SPAN_SESSION_ID: session_id, + WebsocketConstants.ATTR_SPAN_WEBSOCKET_ID: websocket_id, + WebsocketConstants.ATTR_SPAN_SESSION_ID: session_id, }) - if self._cancel_conversation_fn is None: + if self._cancel_websocket_fn is None: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, - "error": {"code": "not_found", "message": "cancel_conversation not implemented"}, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, + "error": {"code": "not_found", "message": "cancel_websocket not implemented"}, }) return try: - result = await self._cancel_conversation_fn(context) + result = await self._cancel_websocket_fn(context) await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_RESULT, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_RESULT, + "websocket_id": websocket_id, "payload": result, }) - except ConversationError as exc: + except WebsocketError as exc: await websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "error": {"code": exc.code, "message": exc.message}, }) except Exception as exc: # pylint: disable=broad-exception-caught self._safe_set_attrs(otel_span, { - ConversationConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - ConversationConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + WebsocketConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) record_error(otel_span, exc) - logger.error("Error in cancel_conversation %s: %s", conversation_id, exc, exc_info=True) + logger.error("Error in cancel_websocket %s: %s", websocket_id, exc, exc_info=True) await 
websocket.send_json({ - "type": ConversationConstants.MSG_TYPE_ERROR, - "conversation_id": conversation_id, + "type": WebsocketConstants.MSG_TYPE_ERROR, + "websocket_id": websocket_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/py.typed b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/py.typed similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/conversations/py.typed rename to sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json index 10b6a26672a2..9e5aa4ee4889 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json +++ b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json @@ -7,7 +7,7 @@ "caplog", "genai", "hypercorn", - "conversations", + "websocket", "openapi", "paramtype", "pytestmark", diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml index 8673bbe5d5b2..d5dc0a51522b 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml @@ -1,7 +1,7 @@ [project] name = "azure-ai-agentserver-websocket" dynamic = ["version", "readme"] -description = "Conversations protocol for Azure AI Hosted Agents" +description = "Websocket protocol for Azure AI Hosted Agents" requires-python = ">=3.10" authors = [ { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, @@ -18,7 +18,7 @@ classifiers = [ "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", ] -keywords = ["azure", "azure sdk", "agent", "agentserver", "conversations", "websocket"] +keywords = ["azure", "azure sdk", 
"agent", "agentserver", "websocket"] dependencies = [ "azure-ai-agentserver-core>=2.0.0b1", @@ -42,11 +42,11 @@ exclude = [ ] [tool.setuptools.dynamic] -version = { attr = "azure.ai.agentserver.conversations._version.VERSION" } +version = { attr = "azure.ai.agentserver.websocket._version.VERSION" } readme = { file = ["README.md"], content-type = "text/markdown" } [tool.setuptools.package-data] -"azure.ai.agentserver.conversations" = ["py.typed"] +"azure.ai.agentserver.websocket" = ["py.typed"] [tool.ruff] line-length = 120 @@ -56,7 +56,7 @@ lint.ignore = [] fix = false [tool.ruff.lint.isort] -known-first-party = ["azure.ai.agentserver.conversations"] +known-first-party = ["azure.ai.agentserver.websocket"] combine-as-imports = true [tool.azure-sdk-build] diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html index 4ab56b508da4..577654a44c95 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html @@ -49,7 +49,7 @@

Echo Agent Client

- + Disconnected @@ -99,8 +99,8 @@

Echo Agent Client

setInputEnabled(true); addMessage("Switched to HTTP (SSE) mode", "system"); } else { - urlInput.value = "ws://localhost:8088/conversations/ws"; - urlInput.placeholder = "ws://host:port/conversations/ws"; + urlInput.value = "ws://localhost:8088/websocket/ws"; + urlInput.placeholder = "ws://host:port/websocket/ws"; connectBtn.style.display = ""; disconnectBtn.style.display = ""; setStatus("disconnected"); diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md index 11426e8cfecd..0d29025432dc 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md @@ -1,17 +1,17 @@ **IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. -# Echo Agent — Conversations (WebSocket) + Invocations (HTTP SSE) Streaming +# Echo Agent — Websocket (WebSocket) + Invocations (HTTP SSE) Streaming -This sample demonstrates a minimal echo agent that combines [azure-ai-agentserver-conversations](https://pypi.org/project/azure-ai-agentserver-conversations/) (WebSocket) and [azure-ai-agentserver-invocations](https://pypi.org/project/azure-ai-agentserver-invocations/) (HTTP SSE) on a single server, streaming responses word-by-word. 
It supports **two communication modes**: +This sample demonstrates a minimal echo agent that combines [azure-ai-agentserver-websocket](https://pypi.org/project/azure-ai-agentserver-websocket/) (WebSocket) and [azure-ai-agentserver-invocations](https://pypi.org/project/azure-ai-agentserver-invocations/) (HTTP SSE) on a single server, streaming responses word-by-word. It supports **two communication modes**: -- **WebSocket** — persistent connection at `ws://localhost:8088/conversations/ws` +- **WebSocket** — persistent connection at `ws://localhost:8088/websocket/ws` - **HTTP SSE** — stateless POST at `http://localhost:8088/invocations` ## How It Works The agent receives user input and echoes it back with a `🔊 Echo:` prefix. Each word is streamed as a separate token chunk. -- **WebSocket mode** (Conversations protocol): tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. +- **WebSocket mode** (Websocket protocol): tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. - **HTTP SSE mode** (Invocations protocol): tokens are sent as `data:` lines per the Server-Sent Events spec, followed by an `event: done` signal. 
## Running Locally @@ -43,7 +43,7 @@ Using the `websockets` library: import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: + async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"message": "Hello world!"} diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml index b77578fb4ba2..ebb342e56334 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml @@ -1,16 +1,16 @@ -name: echo-agent-conversations-websocket-streaming +name: echo-agent-websocket-websocket-streaming description: > A simple echo agent that streams responses word-by-word using the - azure-ai-agentserver-conversations SDK with WebSocket streaming. + azure-ai-agentserver-websocket SDK with WebSocket streaming. 
metadata: tags: - AI Agent Hosting - Azure AI AgentServer - - Conversations Protocol + - Websocket Protocol - Streaming template: - name: echo-agent-conversations-streaming + name: echo-agent-websocket-streaming kind: hosted protocols: - - protocol: conversations + - protocol: websocket version: v0.0.1 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml index 73bb8c93d51a..0eb9468b3c8d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml @@ -1,7 +1,7 @@ kind: hosted name: echo-agent-streaming protocols: - - protocol: conversations + - protocol: websocket version: v0.0.1 resources: cpu: "0.25" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py index cd6f49c310e1..e9bca70b5cdb 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py @@ -5,7 +5,7 @@ Echoes user input back as a stream, sending each word as a separate token chunk. 
Supports two communication modes: -- **WebSocket** at ``ws://localhost:8088/conversations/ws`` +- **WebSocket** at ``ws://localhost:8088/websocket/ws`` - **HTTP SSE** at ``POST http://localhost:8088/invocations`` **Server** (this file):: @@ -17,7 +17,7 @@ import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/conversations/ws") as ws: + async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"message": "Hello world!"} @@ -50,7 +50,7 @@ async def main(): from starlette.requests import Request from starlette.responses import Response, StreamingResponse -from azure.ai.agentserver.conversations import ConversationAgentServerHost, ConversationContext +from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext from azure.ai.agentserver.invocations import InvocationAgentServerHost ECHO_PREFIX = "🔊 Echo: " @@ -70,12 +70,12 @@ async def echo_tokens(message: str) -> AsyncGenerator[dict, None]: # ---------------------------------------------------------------------------InvocationAgentServerHost -# Combined host — Conversations (WebSocket) + Invocations (HTTP/SSE) +# Combined host — WebSocket + Invocations (HTTP/SSE) # --------------------------------------------------------------------------- -class EchoAgentHost(ConversationAgentServerHost, InvocationAgentServerHost): - """Combined host supporting both Invocations (HTTP/SSE) and Conversations (WebSocket). +class EchoAgentHost(WebsocketAgentServerHost, InvocationAgentServerHost): + """Combined host supporting both Invocations (HTTP/SSE) and WebSocket. 
Both parent classes store their handler in ``_invoke_fn`` with incompatible signatures, so we keep a separate ``_http_invoke_fn`` for the invocations @@ -148,20 +148,20 @@ async def handle_http_invoke(request: Request) -> Response: # --------------------------------------------------------------------------- -# WebSocket conversation endpoint (via azure-ai-agentserver-websocket) +# WebSocket endpoint (via azure-ai-agentserver-websocket) # --------------------------------------------------------------------------- @app.invoke_handler async def handle_invoke( - payload: dict, context: ConversationContext # pylint: disable=unused-argument + payload: dict, context: WebsocketContext # pylint: disable=unused-argument ) -> AsyncGenerator[dict, None]: """WebSocket streaming handler — each chunk is a ``stream_chunk`` message. :param payload: The client request payload. :type payload: dict - :param context: Conversation context with IDs. - :type context: ConversationContext + :param context: WebSocket context with IDs. + :type context: WebsocketContext """ message = payload.get( "message", "Hello! Send me a message and I'll echo it back.") diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py index 0520817f3487..edfcbd1fffc4 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py @@ -1,16 +1,16 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Shared fixtures and factory functions for conversations WebSocket tests.""" +"""Shared fixtures and factory functions for WebSocket tests.""" from typing import Any import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, - ConversationError, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, + WebsocketError, ) @@ -22,7 +22,7 @@ "openapi": "3.0.0", "info": {"title": "Echo Agent", "version": "1.0.0"}, "paths": { - "/conversations": { + "/websocket": { "post": { "requestBody": { "required": True, @@ -64,72 +64,72 @@ # --------------------------------------------------------------------------- -def _make_echo_agent(**kwargs: Any) -> ConversationAgentServerHost: - """Create an ConversationAgentServerHost whose invoke handler echoes the payload.""" - app = ConversationAgentServerHost(**kwargs) +def _make_echo_agent(**kwargs: Any) -> WebsocketAgentServerHost: + """Create a WebsocketAgentServerHost whose invoke handler echoes the payload.""" + app = WebsocketAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: - return {"echo": payload, "conversation_id": context.conversation_id} + async def handle(payload: dict, context: WebsocketContext) -> dict: + return {"echo": payload, "websocket_id": context.websocket_id} return app -def _make_streaming_agent(**kwargs: Any) -> ConversationAgentServerHost: - """Create an ConversationAgentServerHost whose invoke handler yields 3 JSON chunks.""" - app = ConversationAgentServerHost(**kwargs) +def _make_streaming_agent(**kwargs: Any) -> WebsocketAgentServerHost: + """Create a WebsocketAgentServerHost whose invoke handler yields 3 JSON chunks.""" + app = WebsocketAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: 
ConversationContext): + async def handle(payload: dict, context: WebsocketContext): for i in range(3): yield {"chunk": i} return app -def _make_async_storage_agent(**kwargs: Any) -> ConversationAgentServerHost: - """Create an ConversationAgentServerHost with get/cancel handlers and in-memory store.""" - app = ConversationAgentServerHost(**kwargs) +def _make_async_storage_agent(**kwargs: Any) -> WebsocketAgentServerHost: + """Create an WebsocketAgentServerHost with get/cancel handlers and in-memory store.""" + app = WebsocketAgentServerHost(**kwargs) store: dict[str, dict] = {} @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: - store[context.conversation_id] = payload - return {"stored": True, "conversation_id": context.conversation_id} - - @app.get_conversation_handler - async def get_handler(context: ConversationContext) -> dict: - if context.conversation_id not in store: - raise ConversationError("not_found", "Not found") - return {"data": store[context.conversation_id]} - - @app.cancel_conversation_handler - async def cancel_handler(context: ConversationContext) -> dict: - if context.conversation_id not in store: - raise ConversationError("not_found", "Not found") - del store[context.conversation_id] + async def handle(payload: dict, context: WebsocketContext) -> dict: + store[context.websocket_id] = payload + return {"stored": True, "websocket_id": context.websocket_id} + + @app.get_websocket_handler + async def get_handler(context: WebsocketContext) -> dict: + if context.websocket_id not in store: + raise WebsocketError("not_found", "Not found") + return {"data": store[context.websocket_id]} + + @app.cancel_websocket_handler + async def cancel_handler(context: WebsocketContext) -> dict: + if context.websocket_id not in store: + raise WebsocketError("not_found", "Not found") + del store[context.websocket_id] return {"status": "cancelled"} return app -def _make_validated_agent() -> ConversationAgentServerHost: - 
"""Create an ConversationAgentServerHost with OpenAPI spec.""" - app = ConversationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) +def _make_validated_agent() -> WebsocketAgentServerHost: + """Create an WebsocketAgentServerHost with OpenAPI spec.""" + app = WebsocketAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"reply": f"echo: {payload['message']}"} return app -def _make_failing_agent(**kwargs: Any) -> ConversationAgentServerHost: - """Create an ConversationAgentServerHost whose handler raises ValueError.""" - app = ConversationAgentServerHost(**kwargs) +def _make_failing_agent(**kwargs: Any) -> WebsocketAgentServerHost: + """Create an WebsocketAgentServerHost whose handler raises ValueError.""" + app = WebsocketAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: raise ValueError("something went wrong") return app diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py index 7d71f20b70b2..589599135a9b 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py @@ -1,13 +1,13 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for decorator-based handler registration on ConversationAgentServerHost.""" +"""Tests for decorator-based handler registration on WebsocketAgentServerHost.""" from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, - ConversationError, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, + WebsocketError, ) @@ -17,10 +17,10 @@ def test_invoke_handler_stores_function(): """@app.invoke_handler stores the function on the protocol object.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} assert app._invoke_fn is handle @@ -32,9 +32,9 @@ async def handle(payload: dict, context: ConversationContext) -> dict: def test_invoke_handler_returns_original_function(): """@app.invoke_handler returns the original function.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} result = app.invoke_handler(handle) @@ -42,33 +42,33 @@ async def handle(payload: dict, context: ConversationContext) -> dict: # --------------------------------------------------------------------------- -# get_conversation_handler stores function +# get_websocket_handler stores function # --------------------------------------------------------------------------- -def test_get_conversation_handler_stores_function(): - """@app.get_conversation_handler stores the function.""" - app = ConversationAgentServerHost() +def test_get_websocket_handler_stores_function(): + """@app.get_websocket_handler stores the function.""" + app 
= WebsocketAgentServerHost() - @app.get_conversation_handler - async def get_handler(context: ConversationContext) -> dict: + @app.get_websocket_handler + async def get_handler(context: WebsocketContext) -> dict: return {"ok": True} - assert app._get_conversation_fn is get_handler + assert app._get_websocket_fn is get_handler # --------------------------------------------------------------------------- -# cancel_conversation_handler stores function +# cancel_websocket_handler stores function # --------------------------------------------------------------------------- -def test_cancel_conversation_handler_stores_function(): - """@app.cancel_conversation_handler stores the function.""" - app = ConversationAgentServerHost() +def test_cancel_websocket_handler_stores_function(): + """@app.cancel_websocket_handler stores the function.""" + app = WebsocketAgentServerHost() - @app.cancel_conversation_handler - async def cancel_handler(context: ConversationContext) -> dict: + @app.cancel_websocket_handler + async def cancel_handler(context: WebsocketContext) -> dict: return {"ok": True} - assert app._cancel_conversation_fn is cancel_handler + assert app._cancel_websocket_fn is cancel_handler # --------------------------------------------------------------------------- @@ -77,7 +77,7 @@ async def cancel_handler(context: ConversationContext) -> dict: def test_shutdown_handler_stores_function(): """@server.shutdown_handler stores the function on the server.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.shutdown_handler async def on_shutdown(): @@ -92,49 +92,49 @@ async def on_shutdown(): def test_full_request_flow(): """Full lifecycle: invoke → get → cancel → get (not_found).""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() store: dict[str, dict] = {} @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: - store[context.conversation_id] = payload + async def handle(payload: 
dict, context: WebsocketContext) -> dict: + store[context.websocket_id] = payload return {"stored": True} - @app.get_conversation_handler - async def get_handler(context: ConversationContext) -> dict: - if context.conversation_id not in store: - raise ConversationError("not_found", "Not found") - return {"data": store[context.conversation_id]} - - @app.cancel_conversation_handler - async def cancel_handler(context: ConversationContext) -> dict: - if context.conversation_id not in store: - raise ConversationError("not_found", "Not found") - del store[context.conversation_id] + @app.get_websocket_handler + async def get_handler(context: WebsocketContext) -> dict: + if context.websocket_id not in store: + raise WebsocketError("not_found", "Not found") + return {"data": store[context.websocket_id]} + + @app.cancel_websocket_handler + async def cancel_handler(context: WebsocketContext) -> dict: + if context.websocket_id not in store: + raise WebsocketError("not_found", "Not found") + del store[context.websocket_id] return {"status": "cancelled"} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: # Invoke ws.send_json({"action": "invoke", "payload": {"key": "lifecycle-test"}}) invoke_resp = ws.receive_json() assert invoke_resp["type"] == "result" - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] # Get - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert get_resp["payload"]["data"]["key"] == "lifecycle-test" # Cancel - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" assert cancel_resp["payload"]["status"] == "cancelled" # Get 
after cancel - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp2 = ws.receive_json() assert get_resp2["type"] == "error" assert get_resp2["error"]["code"] == "not_found" @@ -146,9 +146,9 @@ async def cancel_handler(context: ConversationContext) -> dict: def test_missing_invoke_handler_returns_error(): """Invoke without registered handler returns not_implemented error.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -156,32 +156,32 @@ def test_missing_invoke_handler_returns_error(): def test_missing_get_handler_returns_error(): - """get_conversation without registered handler returns not_found error.""" - app = ConversationAgentServerHost() + """get_websocket without registered handler returns not_found error.""" + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "get_conversation", "conversation_id": "some-id"}) + with client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "get_websocket", "websocket_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" def test_missing_cancel_handler_returns_error(): - """cancel_conversation without registered handler returns not_found error.""" - app = ConversationAgentServerHost() + """cancel_websocket without registered handler returns not_found error.""" + app = WebsocketAgentServerHost() 
@app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "cancel_conversation", "conversation_id": "some-id"}) + with client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "cancel_websocket", "websocket_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -193,17 +193,17 @@ async def handle(payload: dict, context: ConversationContext) -> dict: def test_optional_handlers_default_none(): """Get and cancel handlers default to None.""" - app = ConversationAgentServerHost() - assert app._get_conversation_fn is None - assert app._cancel_conversation_fn is None + app = WebsocketAgentServerHost() + assert app._get_websocket_fn is None + assert app._cancel_websocket_fn is None def test_optional_handler_override(): """Setting an optional handler replaces None.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() - @app.get_conversation_handler - async def get_handler(context: ConversationContext) -> dict: + @app.get_websocket_handler + async def get_handler(context: WebsocketContext) -> dict: return {"ok": True} - assert app._get_conversation_fn is not None + assert app._get_websocket_fn is not None diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py index c7371fb3887f..37e0e5109682 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py @@ -1,14 +1,14 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Edge-case tests for ConversationAgentServerHost over WebSocket.""" +"""Edge-case tests for WebsocketAgentServerHost over WebSocket.""" import uuid from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) @@ -18,7 +18,7 @@ def test_unknown_action_returns_error(echo_client): """Sending an unknown action returns an error.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "unknown_action", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -31,7 +31,7 @@ def test_unknown_action_returns_error(echo_client): def test_invalid_json_returns_error(echo_client): """Sending invalid JSON returns an error but connection stays open.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_text("not valid json {{{") resp = ws.receive_json() assert resp["type"] == "error" @@ -45,7 +45,7 @@ def test_invalid_json_returns_error(echo_client): def test_non_object_json_returns_error(echo_client): """Sending a JSON array instead of object returns an error.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_text("[1, 2, 3]") resp = ws.receive_json() assert resp["type"] == "error" @@ -53,33 +53,33 @@ def test_non_object_json_returns_error(echo_client): # --------------------------------------------------------------------------- -# Conversation ID handling +# Websocket ID handling # --------------------------------------------------------------------------- -def test_conversation_id_auto_generated(echo_client): - """Conversation ID is auto-generated when not 
provided.""" - with echo_client.websocket_connect("/conversations/ws") as ws: +def test_websocket_id_auto_generated(echo_client): + """Websocket ID is auto-generated when not provided.""" + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "conversation_id" in resp - uuid.UUID(resp["conversation_id"]) + assert "websocket_id" in resp + uuid.UUID(resp["websocket_id"]) -def test_conversation_id_accepted_from_message(echo_client): - """Server accepts conversation ID from message field.""" +def test_websocket_id_accepted_from_message(echo_client): + """Server accepts websocket ID from message field.""" custom_id = str(uuid.uuid4()) - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "invoke", "conversation_id": custom_id, "payload": {}}) + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "invoke", "websocket_id": custom_id, "payload": {}}) resp = ws.receive_json() - assert resp["conversation_id"] == custom_id + assert resp["websocket_id"] == custom_id -def test_conversation_id_generated_when_empty(echo_client): - """When empty conversation ID is sent, server generates one.""" - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "invoke", "conversation_id": "", "payload": {}}) +def test_websocket_id_generated_when_empty(echo_client): + """When empty websocket ID is sent, server generates one.""" + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "invoke", "websocket_id": "", "payload": {}}) resp = ws.receive_json() - inv_id = resp["conversation_id"] + inv_id = resp["websocket_id"] uuid.UUID(inv_id) @@ -90,7 +90,7 @@ def test_conversation_id_generated_when_empty(echo_client): def test_large_payload(echo_client): """Large payload (dict with big value) is handled correctly.""" big_value = "x" * (1024 * 1024) - with 
echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"data": big_value}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -100,7 +100,7 @@ def test_large_payload(echo_client): def test_unicode_payload(echo_client): """Unicode payload is preserved.""" text = "Hello, 世界! 🌍" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"text": text}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -113,48 +113,48 @@ def test_unicode_payload(echo_client): def test_empty_streaming(): """Empty streaming response (no chunks) sends only stream_end.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext): + async def handle(payload: dict, context: WebsocketContext): return yield # noqa: E501 — make it a generator client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "stream_end" -def test_streaming_has_conversation_id(): - """Streaming messages include conversation_id.""" - app = ConversationAgentServerHost() +def test_streaming_has_websocket_id(): + """Streaming messages include websocket_id.""" + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext): + async def handle(payload: dict, context: WebsocketContext): yield {"chunk": "data"} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "conversation_id" in resp + 
assert "websocket_id" in resp # --------------------------------------------------------------------------- -# Conversation lifecycle +# Websocket lifecycle # --------------------------------------------------------------------------- def test_multiple_gets(async_storage_client): - """Multiple gets for the same conversation return the same result.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: + """Multiple gets for the same websocket return the same result.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "multi-get"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] for _ in range(3): - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert get_resp["payload"]["data"]["key"] == "multi-get" @@ -162,16 +162,16 @@ def test_multiple_gets(async_storage_client): def test_double_cancel(async_storage_client): """Cancelling twice: second cancel returns error.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "cancel-twice"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) cancel1 = ws.receive_json() assert cancel1["type"] == "result" - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) cancel2 = ws.receive_json() assert cancel2["type"] == "error" assert cancel2["error"]["code"] == "not_found" @@ 
-179,54 +179,54 @@ def test_double_cancel(async_storage_client): def test_invoke_cancel_get(async_storage_client): """Invoke -> cancel -> get returns not_found error.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "icg"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) ws.receive_json() # consume cancel response - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "error" assert get_resp["error"]["code"] == "not_found" # --------------------------------------------------------------------------- -# Multiple sequential conversations on same connection +# Multiple sequential websocket calls on same connection # --------------------------------------------------------------------------- -def test_multiple_sequential_conversations(echo_client): - """Multiple sequential conversations on the same WebSocket connection.""" - with echo_client.websocket_connect("/conversations/ws") as ws: +def test_multiple_sequential_websocket_calls(echo_client): + """Multiple sequential websocket calls on the same WebSocket connection.""" + with echo_client.websocket_connect("/websocket/ws") as ws: ids = set() for i in range(10): ws.send_json({"action": "invoke", "payload": {"idx": i}}) resp = ws.receive_json() assert resp["type"] == "result" assert resp["payload"]["echo"]["idx"] == i - ids.add(resp["conversation_id"]) + ids.add(resp["websocket_id"]) assert len(ids) == 10 # --------------------------------------------------------------------------- -# get/cancel without conversation_id 
+# get/cancel without websocket_id # --------------------------------------------------------------------------- -def test_get_without_conversation_id(echo_client): - """get_conversation without conversation_id returns error.""" - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "get_conversation"}) +def test_get_without_websocket_id(echo_client): + """get_websocket without websocket_id returns error.""" + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "get_websocket"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "invalid_request" -def test_cancel_without_conversation_id(echo_client): - """cancel_conversation without conversation_id returns error.""" - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "cancel_conversation"}) +def test_cancel_without_websocket_id(echo_client): + """cancel_websocket without websocket_id returns error.""" + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "cancel_websocket"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "invalid_request" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py index cdc7d64377f1..b8629a340810 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py @@ -1,13 +1,13 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for get_conversation and cancel_conversation actions over WebSocket.""" +"""Tests for get_websocket and cancel_websocket actions over WebSocket.""" from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, - ConversationError, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, + WebsocketError, ) @@ -16,13 +16,13 @@ # --------------------------------------------------------------------------- def test_get_after_invoke_returns_stored_result(async_storage_client): - """get_conversation after invoke returns the stored result.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: + """get_websocket after invoke returns the stored result.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "stored-data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" @@ -34,9 +34,9 @@ def test_get_after_invoke_returns_stored_result(async_storage_client): # --------------------------------------------------------------------------- def test_get_unknown_id_returns_error(async_storage_client): - """get_conversation with unknown ID returns error.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "get_conversation", "conversation_id": "unknown-id-12345"}) + """get_websocket with unknown ID returns error.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "get_websocket", "websocket_id": "unknown-id-12345"}) resp = 
ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -47,13 +47,13 @@ def test_get_unknown_id_returns_error(async_storage_client): # --------------------------------------------------------------------------- def test_cancel_after_invoke_returns_cancelled(async_storage_client): - """cancel_conversation after invoke returns cancelled status.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: + """cancel_websocket after invoke returns cancelled status.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "cancel-me"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" @@ -65,9 +65,9 @@ def test_cancel_after_invoke_returns_cancelled(async_storage_client): # --------------------------------------------------------------------------- def test_cancel_unknown_id_returns_error(async_storage_client): - """cancel_conversation with unknown ID returns error.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "cancel_conversation", "conversation_id": "unknown-id-12345"}) + """cancel_websocket with unknown ID returns error.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "cancel_websocket", "websocket_id": "unknown-id-12345"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -78,16 +78,16 @@ def test_cancel_unknown_id_returns_error(async_storage_client): # --------------------------------------------------------------------------- def test_get_after_cancel_returns_error(async_storage_client): - 
"""get_conversation after cancel returns error (data removed).""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: + """get_websocket after cancel returns error (data removed).""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "temp"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) ws.receive_json() # consume cancel response - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "error" @@ -98,21 +98,21 @@ def test_get_after_cancel_returns_error(async_storage_client): # GET error returns internal_error # --------------------------------------------------------------------------- -def test_get_conversation_error_returns_internal_error(): - """get_conversation handler raising an exception returns internal_error.""" - app = ConversationAgentServerHost() +def test_get_websocket_error_returns_internal_error(): + """get_websocket handler raising an exception returns internal_error.""" + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} - @app.get_conversation_handler - async def get_handler(context: ConversationContext) -> dict: + @app.get_websocket_handler + async def get_handler(context: WebsocketContext) -> dict: raise RuntimeError("get failed") client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "get_conversation", "conversation_id": "some-id"}) + with client.websocket_connect("/websocket/ws") as 
ws: + ws.send_json({"action": "get_websocket", "websocket_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "internal_error" @@ -122,21 +122,21 @@ async def get_handler(context: ConversationContext) -> dict: # Cancel error returns internal_error # --------------------------------------------------------------------------- -def test_cancel_conversation_error_returns_internal_error(): - """cancel_conversation handler raising an exception returns internal_error.""" - app = ConversationAgentServerHost() +def test_cancel_websocket_error_returns_internal_error(): + """cancel_websocket handler raising an exception returns internal_error.""" + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} - @app.cancel_conversation_handler - async def cancel_handler(context: ConversationContext) -> dict: + @app.cancel_websocket_handler + async def cancel_handler(context: WebsocketContext) -> dict: raise RuntimeError("cancel failed") client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "cancel_conversation", "conversation_id": "some-id"}) + with client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "cancel_websocket", "websocket_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "internal_error" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py index a531d67e3662..c67f9f4315fc 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py @@ -1,16 +1,16 @@ # 
--------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for graceful shutdown with ConversationAgentServerHost.""" +"""Tests for graceful shutdown with WebsocketAgentServerHost.""" import asyncio import logging import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) @@ -18,13 +18,13 @@ # Helpers # --------------------------------------------------------------------------- -def _make_server_with_shutdown(**kwargs) -> tuple[ConversationAgentServerHost, list]: - """Create ConversationAgentServerHost with a tracked shutdown handler.""" - server = ConversationAgentServerHost(**kwargs) +def _make_server_with_shutdown(**kwargs) -> tuple[WebsocketAgentServerHost, list]: + """Create WebsocketAgentServerHost with a tracked shutdown handler.""" + server = WebsocketAgentServerHost(**kwargs) calls: list[str] = [] @server.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} @server.shutdown_handler @@ -46,10 +46,10 @@ def test_shutdown_handler_registered(): def test_shutdown_handler_not_registered(): """Without @shutdown_handler, _shutdown_fn is None.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} assert app._shutdown_fn is None @@ -100,11 +100,11 @@ async def test_shutdown_handler_called_on_lifespan_exit(): @pytest.mark.asyncio async def test_shutdown_handler_timeout(caplog): """Shutdown handler that exceeds timeout is 
warned about.""" - server = ConversationAgentServerHost(graceful_shutdown_timeout=1) + server = WebsocketAgentServerHost(graceful_shutdown_timeout=1) calls: list[str] = [] @server.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} @server.shutdown_handler @@ -126,10 +126,10 @@ async def on_shutdown(): @pytest.mark.asyncio async def test_shutdown_handler_exception(caplog): """Shutdown handler that raises is caught and logged.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} @app.shutdown_handler @@ -148,19 +148,19 @@ async def on_shutdown(): def test_default_graceful_shutdown_timeout(): """Default graceful shutdown timeout is 30 seconds.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() assert app._graceful_shutdown_timeout == 30 def test_custom_graceful_shutdown_timeout(): """Custom graceful_shutdown_timeout is stored.""" - server = ConversationAgentServerHost(graceful_shutdown_timeout=60) + server = WebsocketAgentServerHost(graceful_shutdown_timeout=60) assert server._graceful_shutdown_timeout == 60 def test_zero_graceful_shutdown_timeout(): """Zero timeout disables the drain period.""" - server = ConversationAgentServerHost(graceful_shutdown_timeout=0) + server = WebsocketAgentServerHost(graceful_shutdown_timeout=0) assert server._graceful_shutdown_timeout == 0 @@ -183,14 +183,14 @@ def test_health_endpoint_during_operation(): def test_no_shutdown_handler_is_noop(): """Without a shutdown handler, WebSocket and lifespan work fine.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def 
handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -204,7 +204,7 @@ def test_multiple_requests_before_shutdown(): """Multiple requests can be served on the same WebSocket connection.""" server, _ = _make_server_with_shutdown() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: for i in range(5): ws.send_json({"action": "invoke", "payload": {"idx": i}}) resp = ws.receive_json() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py index 827a4fc111aa..f3606e3e98c4 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py @@ -4,7 +4,7 @@ """Tests for the invoke action over WebSocket.""" import uuid -from azure.ai.agentserver.conversations import ConversationContext +from azure.ai.agentserver.websocket import WebsocketContext # --------------------------------------------------------------------------- @@ -13,7 +13,7 @@ def test_invoke_echo_payload(echo_client): """Invoke echoes the payload back.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"msg": "hello world"}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -24,42 +24,42 @@ def test_invoke_echo_payload(echo_client): # IDs # --------------------------------------------------------------------------- -def test_invoke_returns_conversation_id(echo_client): - """Response includes a valid UUID conversation_id.""" - with 
echo_client.websocket_connect("/conversations/ws") as ws: +def test_invoke_returns_websocket_id(echo_client): + """Response includes a valid UUID websocket_id.""" + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "conversation_id" in resp - uuid.UUID(resp["conversation_id"]) + assert "websocket_id" in resp + uuid.UUID(resp["websocket_id"]) def test_invoke_returns_session_id(echo_client): """Response includes a valid UUID session_id.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "session_id" in resp uuid.UUID(resp["session_id"]) -def test_invoke_unique_conversation_ids(echo_client): - """Each invoke gets a unique conversation ID.""" +def test_invoke_unique_websocket_ids(echo_client): + """Each invoke gets a unique websocket ID.""" ids = set() - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: for _ in range(5): ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - ids.add(resp["conversation_id"]) + ids.add(resp["websocket_id"]) assert len(ids) == 5 -def test_invoke_accepts_custom_conversation_id(echo_client): - """If the message includes conversation_id, the server uses it.""" +def test_invoke_accepts_custom_websocket_id(echo_client): + """If the message includes websocket_id, the server uses it.""" custom_id = str(uuid.uuid4()) - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "invoke", "conversation_id": custom_id, "payload": {}}) + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "invoke", "websocket_id": custom_id, "payload": {}}) resp = ws.receive_json() - assert resp["conversation_id"] == custom_id + assert resp["websocket_id"] == 
custom_id # --------------------------------------------------------------------------- @@ -68,7 +68,7 @@ def test_invoke_accepts_custom_conversation_id(echo_client): def test_streaming_returns_chunks(streaming_client): """Streaming handler yields 3 chunks then stream_end.""" - with streaming_client.websocket_connect("/conversations/ws") as ws: + with streaming_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) chunks = [] while True: @@ -82,13 +82,13 @@ def test_streaming_returns_chunks(streaming_client): assert chunk == {"chunk": i} -def test_streaming_has_conversation_id(streaming_client): - """Streaming messages include conversation_id.""" - with streaming_client.websocket_connect("/conversations/ws") as ws: +def test_streaming_has_websocket_id(streaming_client): + """Streaming messages include websocket_id.""" + with streaming_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "conversation_id" in resp - uuid.UUID(resp["conversation_id"]) + assert "websocket_id" in resp + uuid.UUID(resp["websocket_id"]) # --------------------------------------------------------------------------- @@ -97,7 +97,7 @@ def test_streaming_has_conversation_id(streaming_client): def test_invoke_empty_payload(echo_client): """Empty payload doesn't crash the server.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -109,7 +109,7 @@ def test_invoke_empty_payload(echo_client): def test_invoke_error_returns_error(failing_client): """Handler exception returns error message.""" - with failing_client.websocket_connect("/conversations/ws") as ws: + with failing_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = 
ws.receive_json() assert resp["type"] == "error" @@ -117,17 +117,17 @@ def test_invoke_error_returns_error(failing_client): assert resp["error"]["message"] == "Internal server error" -def test_invoke_error_has_conversation_id(failing_client): - """Error response still includes conversation_id.""" - with failing_client.websocket_connect("/conversations/ws") as ws: +def test_invoke_error_has_websocket_id(failing_client): + """Error response still includes websocket_id.""" + with failing_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "conversation_id" in resp + assert "websocket_id" in resp def test_error_hides_details_by_default(failing_client): """Exception message is hidden in error responses.""" - with failing_client.websocket_connect("/conversations/ws") as ws: + with failing_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "something went wrong" not in resp["error"]["message"] diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py index 701d8f0a5c4d..7f0aff0eb992 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py @@ -1,14 +1,14 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for varied payloads with ConversationAgentServerHost over WebSocket.""" +"""Tests for varied payloads with WebsocketAgentServerHost over WebSocket.""" import base64 from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) @@ -16,12 +16,12 @@ # Helper: echo agent with content type tracking # --------------------------------------------------------------------------- -def _make_content_type_echo_agent() -> ConversationAgentServerHost: +def _make_content_type_echo_agent() -> WebsocketAgentServerHost: """Agent that echoes payload and notes the content_type field.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return { "echo": payload, "received_content_type": payload.get("content_type", "unknown"), @@ -30,12 +30,12 @@ async def handle(payload: dict, context: ConversationContext) -> dict: return app -def _make_sse_agent() -> ConversationAgentServerHost: +def _make_sse_agent() -> WebsocketAgentServerHost: """Agent that returns streaming chunks.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext): + async def handle(payload: dict, context: WebsocketContext): for i in range(3): yield {"event": i} @@ -51,7 +51,7 @@ def test_png_payload(): server = _make_content_type_echo_agent() client = TestClient(server) fake_png = b"\x89PNG\r\n\x1a\n" + b"\x00" * 100 - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -70,7 
+70,7 @@ def test_jpeg_payload(): server = _make_content_type_echo_agent() client = TestClient(server) fake_jpeg = b"\xff\xd8\xff\xe0" + b"\x00" * 100 - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -88,7 +88,7 @@ def test_wav_payload(): server = _make_content_type_echo_agent() client = TestClient(server) fake_wav = b"RIFF" + b"\x00" * 100 - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -105,7 +105,7 @@ def test_text_plain_payload(): """text/plain content type payload is accepted.""" server = _make_content_type_echo_agent() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -124,14 +124,14 @@ def test_text_plain_payload(): def test_params_in_payload(): """Arbitrary parameters are accessible in the handler payload.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"name": payload.get("name", "unknown")} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"name": "Alice"}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -146,7 +146,7 @@ def test_streaming_chunks(): """Streaming response sends multiple chunks.""" server = _make_sse_agent() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) chunks = [] while True: 
@@ -166,10 +166,10 @@ def test_streaming_chunks(): def test_health_endpoint_returns_200(): """GET /readiness returns 200 with healthy status.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py index 537187d831a6..3695258f7567 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py @@ -7,20 +7,20 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) # --------------------------------------------------------------------------- -# ConversationAgentServerHost no longer accepts request_timeout +# WebsocketAgentServerHost no longer accepts request_timeout # --------------------------------------------------------------------------- def test_no_request_timeout_parameter(): - """ConversationAgentServerHost no longer accepts request_timeout.""" + """WebsocketAgentServerHost no longer accepts request_timeout.""" with pytest.raises(TypeError): - ConversationAgentServerHost(request_timeout=10) + WebsocketAgentServerHost(request_timeout=10) # --------------------------------------------------------------------------- @@ -29,15 +29,15 @@ def test_no_request_timeout_parameter(): def test_slow_invoke_completes(): """Without timeout, handler runs to completion.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def 
handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: await asyncio.sleep(0.1) return {"status": "done"} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py index 0e09060d246c..2fc9a571db4d 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py @@ -1,41 +1,41 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for basic server route registration with ConversationAgentServerHost.""" +"""Tests for basic server route registration with WebsocketAgentServerHost.""" import uuid from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) from conftest import SAMPLE_OPENAPI_SPEC # --------------------------------------------------------------------------- -# WebSocket connection /conversations/ws +# WebSocket connection /websocket/ws # --------------------------------------------------------------------------- def test_websocket_invoke_returns_result(echo_client): """Invoke via WebSocket returns a result.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"test": True}}) resp = 
ws.receive_json() assert resp["type"] == "result" # --------------------------------------------------------------------------- -# Conversation ID is valid UUID +# Websocket ID is valid UUID # --------------------------------------------------------------------------- -def test_invoke_returns_uuid_conversation_id(echo_client): - """Invoke returns a valid UUID conversation ID.""" - with echo_client.websocket_connect("/conversations/ws") as ws: +def test_invoke_returns_uuid_websocket_id(echo_client): + """Invoke returns a valid UUID websocket ID.""" + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - inv_id = resp["conversation_id"] + inv_id = resp["websocket_id"] parsed = uuid.UUID(inv_id) assert str(parsed) == inv_id @@ -45,8 +45,8 @@ def test_invoke_returns_uuid_conversation_id(echo_client): # --------------------------------------------------------------------------- def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): - """GET /conversations/docs/openapi.json returns 404 when no spec registered.""" - resp = no_spec_client.get("/conversations/docs/openapi.json") + """GET /websocket/docs/openapi.json returns 404 when no spec registered.""" + resp = no_spec_client.get("/websocket/docs/openapi.json") assert resp.status_code == 404 @@ -55,27 +55,27 @@ def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): # --------------------------------------------------------------------------- def test_get_openapi_spec_returns_spec_when_registered(): - """GET /conversations/docs/openapi.json returns the spec when registered.""" - app = ConversationAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) + """GET /websocket/docs/openapi.json returns the spec when registered.""" + app = WebsocketAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, 
context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - resp = client.get("/conversations/docs/openapi.json") + resp = client.get("/websocket/docs/openapi.json") assert resp.status_code == 200 assert resp.json() == SAMPLE_OPENAPI_SPEC # --------------------------------------------------------------------------- -# get_conversation returns not_found error by default +# get_websocket returns not_found error by default # --------------------------------------------------------------------------- -def test_get_conversation_returns_not_found_default(echo_client): - """get_conversation without handler returns not_found error.""" - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "get_conversation", "conversation_id": "some-id"}) +def test_get_websocket_returns_not_found_default(echo_client): + """get_websocket without handler returns not_found error.""" + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "get_websocket", "websocket_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -85,10 +85,10 @@ def test_get_conversation_returns_not_found_default(echo_client): # cancel returns not_found error by default # --------------------------------------------------------------------------- -def test_cancel_conversation_returns_not_found_default(echo_client): - """cancel_conversation without handler returns not_found error.""" - with echo_client.websocket_connect("/conversations/ws") as ws: - ws.send_json({"action": "cancel_conversation", "conversation_id": "some-id"}) +def test_cancel_websocket_returns_not_found_default(echo_client): + """cancel_websocket without handler returns not_found error.""" + with echo_client.websocket_connect("/websocket/ws") as ws: + ws.send_json({"action": "cancel_websocket", "websocket_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert 
resp["error"]["code"] == "not_found" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py index 97977621f055..1b3422727538 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py @@ -8,9 +8,9 @@ from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) @@ -20,7 +20,7 @@ def test_invoke_has_session_id(echo_client): """Invoke response includes session_id.""" - with echo_client.websocket_connect("/conversations/ws") as ws: + with echo_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "session_id" in resp @@ -33,14 +33,14 @@ def test_invoke_has_session_id(echo_client): def test_invoke_with_session_id_in_message(): """Invoke with session_id in message uses that value.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({ "action": "invoke", "session_id": "my-custom-session", @@ -56,49 +56,49 @@ async def handle(payload: dict, context: ConversationContext) -> dict: def test_invoke_uses_env_var(): """Invoke uses FOUNDRY_AGENT_SESSION_ID env var when no session_id in message.""" - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + 
async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) with patch.dict(os.environ, {"FOUNDRY_AGENT_SESSION_ID": "env-session"}): - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["session_id"] == "env-session" # --------------------------------------------------------------------------- -# get_conversation does NOT include session_id (not part of get protocol) +# get_websocket does NOT include session_id (not part of get protocol) # --------------------------------------------------------------------------- -def test_get_conversation_no_session_id(async_storage_client): - """get_conversation response does not include session_id.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: +def test_get_websocket_no_session_id(async_storage_client): + """get_websocket response does not include session_id.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert "session_id" not in get_resp # --------------------------------------------------------------------------- -# cancel_conversation does NOT include session_id +# cancel_websocket does NOT include session_id # --------------------------------------------------------------------------- -def test_cancel_conversation_no_session_id(async_storage_client): - """cancel_conversation response does not include session_id.""" - with async_storage_client.websocket_connect("/conversations/ws") as ws: 
+def test_cancel_websocket_no_session_id(async_storage_client): + """cancel_websocket response does not include session_id.""" + with async_storage_client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" assert "session_id" not in cancel_resp diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py index ef2c7ee5d10a..fbf0aecf9de3 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py @@ -10,9 +10,9 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) @@ -51,11 +51,11 @@ def _make_server_with_child_span(): """Server whose handler creates a child span (simulating a framework).""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() child_tracer = trace.get_tracer("test.framework") @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: with child_tracer.start_as_current_span("framework_invoke_agent") as _span: return {"ok": True} @@ -66,11 +66,11 @@ def 
_make_streaming_server_with_child_span(): """Server with streaming response whose handler creates a child span.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() child_tracer = trace.get_tracer("test.framework") @app.invoke_handler - async def handle(payload: dict, context: ConversationContext): + async def handle(payload: dict, context: WebsocketContext): with child_tracer.start_as_current_span("framework_invoke_agent"): yield {"chunk": "data"} @@ -102,7 +102,7 @@ def test_framework_span_is_child_of_invoke_span(): agentserver invoke_agent span, not a sibling.""" server = _make_server_with_child_span() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -114,7 +114,7 @@ def test_framework_span_is_child_streaming(): """Same parent-child relationship holds for streaming responses.""" server = _make_streaming_server_with_child_span() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) while True: resp = ws.receive_json() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py index a23ed4716b6e..341dd719cc57 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py @@ -1,7 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for OpenTelemetry tracing in the WebSocket conversations protocol.""" +"""Tests for OpenTelemetry tracing in the WebSocket websocket protocol.""" import os import uuid from unittest.mock import patch @@ -9,10 +9,10 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, - ConversationError, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, + WebsocketError, ) @@ -68,13 +68,13 @@ def _get_spans(): # --------------------------------------------------------------------------- def _make_tracing_server(**kwargs): - """Create an ConversationAgentServerHost with tracing enabled.""" + """Create an WebsocketAgentServerHost with tracing enabled.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = ConversationAgentServerHost(**kwargs) + server = WebsocketAgentServerHost(**kwargs) @server.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"echo": payload} return server @@ -84,27 +84,27 @@ def _make_tracing_server_with_get_cancel(**kwargs): """Create a tracing-enabled server with get/cancel handlers.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = ConversationAgentServerHost(**kwargs) + server = WebsocketAgentServerHost(**kwargs) store: dict[str, dict] = {} @server.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: - store[context.conversation_id] = payload + async def handle(payload: dict, context: WebsocketContext) -> 
dict: + store[context.websocket_id] = payload return {"stored": True} - @server.get_conversation_handler - async def get_handler(context: ConversationContext) -> dict: - if context.conversation_id in store: - return {"data": store[context.conversation_id]} - raise ConversationError("not_found", "Not found") + @server.get_websocket_handler + async def get_handler(context: WebsocketContext) -> dict: + if context.websocket_id in store: + return {"data": store[context.websocket_id]} + raise WebsocketError("not_found", "Not found") - @server.cancel_conversation_handler - async def cancel_handler(context: ConversationContext) -> dict: - if context.conversation_id in store: - del store[context.conversation_id] + @server.cancel_websocket_handler + async def cancel_handler(context: WebsocketContext) -> dict: + if context.websocket_id in store: + del store[context.websocket_id] return {"status": "cancelled"} - raise ConversationError("not_found", "Not found") + raise WebsocketError("not_found", "Not found") return server @@ -113,10 +113,10 @@ def _make_failing_tracing_server(**kwargs): """Create a tracing-enabled server whose handler raises.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = ConversationAgentServerHost(**kwargs) + server = WebsocketAgentServerHost(**kwargs) @server.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: raise ValueError("tracing error test") return server @@ -126,10 +126,10 @@ def _make_streaming_tracing_server(**kwargs): """Create a tracing-enabled server with streaming response.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = 
ConversationAgentServerHost(**kwargs) + server = WebsocketAgentServerHost(**kwargs) @server.invoke_handler - async def handle(payload: dict, context: ConversationContext): + async def handle(payload: dict, context: WebsocketContext): yield {"chunk": 1} yield {"chunk": 2} @@ -145,14 +145,14 @@ def test_tracing_disabled_by_default(): if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -169,7 +169,7 @@ def test_tracing_enabled_creates_invoke_span(): """Tracing enabled creates a span named 'invoke_agent'.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -187,7 +187,7 @@ def test_invoke_error_records_exception(): """When handler raises, the span records the exception.""" server = _make_failing_tracing_server() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -203,37 +203,37 @@ def test_invoke_error_records_exception(): # GET/cancel create spans # --------------------------------------------------------------------------- -def test_get_conversation_creates_span(): - """get_conversation creates a span.""" +def test_get_websocket_creates_span(): + """get_websocket creates a span.""" server = 
_make_tracing_server_with_get_cancel() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "get_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) ws.receive_json() spans = _get_spans() - get_spans = [s for s in spans if "get_conversation" in s.name] + get_spans = [s for s in spans if "get_websocket" in s.name] assert len(get_spans) >= 1 -def test_cancel_conversation_creates_span(): - """cancel_conversation creates a span.""" +def test_cancel_websocket_creates_span(): + """cancel_websocket creates a span.""" server = _make_tracing_server_with_get_cancel() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["conversation_id"] + inv_id = invoke_resp["websocket_id"] - ws.send_json({"action": "cancel_conversation", "conversation_id": inv_id}) + ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) ws.receive_json() spans = _get_spans() - cancel_spans = [s for s in spans if "cancel_conversation" in s.name] + cancel_spans = [s for s in spans if "cancel_websocket" in s.name] assert len(cancel_spans) >= 1 @@ -245,14 +245,14 @@ def test_tracing_via_appinsights_env_var(): """Tracing is enabled when APPLICATIONINSIGHTS_CONNECTION_STRING is set.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() 
@app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -271,17 +271,17 @@ def test_no_tracing_when_no_endpoints(): env.pop("APPLICATIONINSIGHTS_CONNECTION_STRING", None) env.pop("OTEL_EXPORTER_OTLP_ENDPOINT", None) with patch.dict(os.environ, env, clear=True): - app = ConversationAgentServerHost() + app = WebsocketAgentServerHost() @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"ok": True} if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -298,7 +298,7 @@ def test_streaming_creates_span(): """Streaming response creates and completes a span.""" server = _make_streaming_tracing_server() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) # Consume all streaming messages while True: @@ -319,7 +319,7 @@ def test_genai_attributes_on_invoke_span(): """Invoke span has GenAI semantic convention attributes.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -337,11 +337,11 @@ def test_genai_attributes_on_invoke_span(): # Session ID in gen_ai.conversation.id # 
--------------------------------------------------------------------------- -def test_session_id_in_conversation_id(): +def test_session_id_in_websocket_id(): """Session ID is set as gen_ai.conversation.id on invoke span.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({ "action": "invoke", "session_id": "test-session", diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py index f6c9299f2dd3..18714b2a50e4 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py +++ b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py @@ -4,17 +4,17 @@ """Tests for WebSocket ping/pong keep-alive.""" from starlette.testclient import TestClient -from azure.ai.agentserver.conversations import ( - ConversationAgentServerHost, - ConversationContext, +from azure.ai.agentserver.websocket import ( + WebsocketAgentServerHost, + WebsocketContext, ) def _make_echo_app(**kwargs): - app = ConversationAgentServerHost(**kwargs) + app = WebsocketAgentServerHost(**kwargs) @app.invoke_handler - async def handle(payload: dict, context: ConversationContext) -> dict: + async def handle(payload: dict, context: WebsocketContext) -> dict: return {"echo": payload} return app @@ -27,16 +27,16 @@ async def handle(payload: dict, context: ConversationContext) -> dict: def test_client_ping_gets_pong(): """Server replies with pong when client sends a ping action.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "ping"}) resp = ws.receive_json() assert resp["type"] == "pong" def test_client_ping_does_not_interrupt_invoke(): - """A ping/pong exchange between conversations doesn't break 
the connection.""" + """A ping/pong exchange between websocket calls doesn't break the connection.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: # Normal invoke ws.send_json({"action": "invoke", "payload": {"n": 1}}) r1 = ws.receive_json() @@ -57,7 +57,7 @@ def test_client_ping_does_not_interrupt_invoke(): def test_client_pong_is_accepted_silently(): """Server accepts pong action without returning an error.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "pong"}) # No response expected for pong — verify next invoke still works. ws.send_json({"action": "invoke", "payload": {"ok": True}}) @@ -73,7 +73,7 @@ def test_ping_disabled_with_zero_interval(): """Setting ws_ping_interval=0 disables the background ping task.""" app = _make_echo_app(ws_ping_interval=0) client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -83,7 +83,7 @@ def test_custom_ping_interval(): """A custom ws_ping_interval is accepted without error.""" app = _make_echo_app(ws_ping_interval=15) client = TestClient(app) - with client.websocket_connect("/conversations/ws") as ws: + with client.websocket_connect("/websocket/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" From 336d4c2b49021f63a0737c81c9e7e2ef91d5ba1a Mon Sep 17 00:00:00 2001 From: Xinran Date: Wed, 6 May 2026 07:15:32 +0000 Subject: [PATCH 05/10] =?UTF-8?q?Use=20a=20single=20=E2=80=9Cinvocations?= =?UTF-8?q?=E2=80=9D=20umbrella=20for=20BYO=20protocols;=20WebSocket=20fal?= =?UTF-8?q?ls=20under=20it?= MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../CHANGELOG.md | 9 + .../README.md | 257 ++++++++++++- .../ai/agentserver/invocations/__init__.py | 24 +- .../invocations/_invocation_ws.py} | 352 +++++++++--------- .../agentserver/invocations/_ws_constants.py} | 19 +- .../pyproject.toml | 2 +- .../streaming_ws_invoke_agent}/README.md | 27 +- .../streaming_ws_invoke_agent}/agent.yaml | 2 +- .../browser_client/client.py | 0 .../browser_client/index.html | 6 +- .../streaming_ws_invoke_agent}/main.py | 65 ++-- .../requirements.txt | 2 + .../tests/conftest.py | 16 + .../tests/conftest_ws.py | 192 ++++++++++ .../tests/test_ws_decorator_pattern.py | 209 +++++++++++ .../tests/test_ws_edge_cases.py} | 144 +++---- .../tests/test_ws_get_cancel.py} | 94 ++--- .../tests/test_ws_graceful_shutdown.py} | 74 ++-- .../tests/test_ws_invoke.py} | 74 ++-- .../tests/test_ws_keepalive.py | 36 +- .../tests/test_ws_multimodal_protocol.py} | 62 +-- .../tests/test_ws_request_limits.py} | 24 +- .../tests/test_ws_server_routes.py} | 68 ++-- .../tests/test_ws_session_id.py} | 56 +-- .../tests/test_ws_span_parenting.py} | 35 +- .../tests/test_ws_tracing.py} | 135 ++++--- .../CHANGELOG.md | 15 - .../azure-ai-agentserver-websocket/LICENSE | 21 -- .../MANIFEST.in | 8 - .../azure-ai-agentserver-websocket/README.md | 292 --------------- .../azure/__init__.py | 1 - .../azure/ai/__init__.py | 1 - .../azure/ai/agentserver/__init__.py | 1 - .../ai/agentserver/websocket/__init__.py | 28 -- .../ai/agentserver/websocket/_version.py | 5 - .../azure/ai/agentserver/websocket/py.typed | 0 .../cspell.json | 26 -- .../dev_requirements.txt | 7 - .../pyproject.toml | 68 ---- .../pyrightconfig.json | 11 - .../streaming_echo_agent/.dockerignore | 6 - .../samples/streaming_echo_agent/Dockerfile | 21 -- .../streaming_echo_agent/agent.manifest.yaml | 16 - .../streaming_echo_agent/requirements.txt | 3 - .../tests/conftest.py | 192 ---------- .../tests/test_decorator_pattern.py | 209 ----------- 46 files 
changed, 1338 insertions(+), 1577 deletions(-) rename sdk/agentserver/{azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py => azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py} (60%) rename sdk/agentserver/{azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py => azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_ws_constants.py} (53%) rename sdk/agentserver/{azure-ai-agentserver-websocket/samples/streaming_echo_agent => azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent}/README.md (63%) rename sdk/agentserver/{azure-ai-agentserver-websocket/samples/streaming_echo_agent => azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent}/agent.yaml (79%) rename sdk/agentserver/{azure-ai-agentserver-websocket/samples => azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent}/browser_client/client.py (100%) rename sdk/agentserver/{azure-ai-agentserver-websocket/samples => azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent}/browser_client/index.html (98%) rename sdk/agentserver/{azure-ai-agentserver-websocket/samples/streaming_echo_agent => azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent}/main.py (68%) create mode 100644 sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/requirements.txt create mode 100644 sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest_ws.py create mode 100644 sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_decorator_pattern.py rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_edge_cases.py => azure-ai-agentserver-invocations/tests/test_ws_edge_cases.py} (56%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_get_cancel.py => azure-ai-agentserver-invocations/tests/test_ws_get_cancel.py} (50%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py => 
azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py} (74%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_invoke.py => azure-ai-agentserver-invocations/tests/test_ws_invoke.py} (58%) rename sdk/agentserver/{azure-ai-agentserver-websocket => azure-ai-agentserver-invocations}/tests/test_ws_keepalive.py (71%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py => azure-ai-agentserver-invocations/tests/test_ws_multimodal_protocol.py} (76%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_request_limits.py => azure-ai-agentserver-invocations/tests/test_ws_request_limits.py} (64%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_server_routes.py => azure-ai-agentserver-invocations/tests/test_ws_server_routes.py} (52%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_session_id.py => azure-ai-agentserver-invocations/tests/test_ws_session_id.py} (61%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_span_parenting.py => azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py} (81%) rename sdk/agentserver/{azure-ai-agentserver-websocket/tests/test_tracing.py => azure-ai-agentserver-invocations/tests/test_ws_tracing.py} (73%) delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/LICENSE delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/README.md delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py delete mode 100644 
sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_version.py delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/py.typed delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/cspell.json delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py delete mode 100644 sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md index 6ead3c39d58d..f6796faf519c 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md +++ b/sdk/agentserver/azure-ai-agentserver-invocations/CHANGELOG.md @@ -4,6 +4,15 @@ ### Features Added +- Added WebSocket invocation protocol (`invocations_ws`) — merged from the standalone `azure-ai-agentserver-websocket` package. + - New `InvocationWSAgentServerHost` class exposing a single persistent WebSocket endpoint at `/invocations_ws/ws` for invoke / get_invocation / cancel_invocation actions, with built-in streaming support via async generators. + - New `InvocationWSContext` and `InvocationWSError` types passed to handler functions. 
+ - Decorator-based handler registration: `@app.ws_invoke_handler`, `@app.ws_get_invocation_handler`, `@app.ws_cancel_invocation_handler`. + - Built-in WebSocket keep-alive with configurable `ws_ping_interval` (default 30 s) to survive Azure APIM / Load Balancer idle timeouts. + - OpenAPI spec discovery endpoint at `GET /invocations_ws/docs/openapi.json`. + - Distributed tracing with GenAI semantic-convention spans (`invoke_agent`, `get_invocation`, `cancel_invocation`) and span attributes under the `azure.ai.agentserver.invocations_ws.*` namespace. + - Cooperative multiple inheritance with `InvocationAgentServerHost` so a single host can serve both HTTP (`invocations`) and WebSocket (`invocations_ws`) protocols. + ### Breaking Changes ### Bugs Fixed diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/README.md b/sdk/agentserver/azure-ai-agentserver-invocations/README.md index 5e9dfe515657..b76cbe2c4277 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/README.md +++ b/sdk/agentserver/azure-ai-agentserver-invocations/README.md @@ -1,6 +1,9 @@ # Azure AI Agent Server Invocations client library for Python -The `azure-ai-agentserver-invocations` package provides the invocation protocol endpoints for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and adds the full invocation lifecycle: `POST /invocations`, `GET /invocations/{id}`, `POST /invocations/{id}/cancel`, and `GET /invocations/docs/openapi.json`. +The `azure-ai-agentserver-invocations` package provides the invocation protocol endpoints for Azure AI Hosted Agent containers. 
It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and supports two transport modes: + +- **HTTP** (`invocations` protocol) — `POST /invocations`, `GET /invocations/{id}`, `POST /invocations/{id}/cancel`, `GET /invocations/docs/openapi.json` +- **WebSocket** (`invocations_ws` protocol) — persistent WebSocket at `/invocations_ws/ws` with invoke, get, cancel, and streaming over a single connection ## Getting started @@ -182,6 +185,253 @@ app = InvocationAgentServerHost(openapi_spec={ }) ``` +--- + +## WebSocket Protocol (`invocations_ws`) + +The package also ships an alternative transport that runs the same invocation lifecycle over a single persistent **WebSocket** long connection. Use this when you want lower latency for streaming, full-duplex agent interactions, or to avoid HTTP request overhead per turn. + +### InvocationWSAgentServerHost + +`InvocationWSAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the `invocations_ws` protocol. It exposes decorator methods for registering handler functions: + +- `@app.ws_invoke_handler` — **Required.** Handles `invoke` actions. Supports both async functions (non-streaming) and async generators (streaming). +- `@app.ws_get_invocation_handler` — Optional. Handles `get_invocation` actions. +- `@app.ws_cancel_invocation_handler` — Optional. Handles `cancel_invocation` actions. + +### InvocationWSContext + +WebSocket handler functions receive an `InvocationWSContext` object containing: + +- `context.invocation_id` — The invocation ID (echoed from client or auto-generated UUID). +- `context.session_id` — The resolved session ID. + +### InvocationWSError + +Handlers can raise `InvocationWSError(code, message)` to return a domain-specific error to the client without exposing internal details. 
+ +### WebSocket endpoint + +All operations use a single persistent WebSocket connection: + +| Route | Description | +|---|---| +| `ws://host:port/invocations_ws/ws` | WebSocket endpoint for all `invocations_ws` operations | +| `GET /invocations_ws/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | +| `GET /readiness` | Health check (HTTP) | + +### Client → Server messages + +All messages are JSON text frames with an `action` field: + +```json +{"action": "invoke", "payload": {...}, "invocation_id": "optional", "session_id": "optional"} +{"action": "get_invocation", "invocation_id": "required"} +{"action": "cancel_invocation", "invocation_id": "required"} +{"action": "ping"} +{"action": "pong"} +``` + +### Server → Client messages + +```json +{"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} +{"type": "stream_end", "invocation_id": "...", "session_id": "..."} +{"type": "error", "invocation_id": "...", "error": {"code": "...", "message": "..."}} +{"type": "ping"} +{"type": "pong"} +``` + +### WebSocket keep-alive (ping/pong) + +Azure APIM and Azure Load Balancer silently drop idle WebSocket connections after approximately 4 minutes. To prevent this, the server sends periodic `{"type": "ping"}` messages to each connected client. + +- **Default interval**: 30 seconds. +- **Disable**: `ws_ping_interval=0`. +- **Custom**: any positive integer, e.g. `ws_ping_interval=15`. + +```python +app = InvocationWSAgentServerHost(ws_ping_interval=20) # ping every 20 seconds +``` + +Clients should respond with `{"action": "pong"}` when they receive a `{"type": "ping"}` message. Clients may also send `{"action": "ping"}` at any time; the server replies with `{"type": "pong"}`. + +### Session ID resolution (WebSocket) + +Session IDs group related invocations. Resolution order: + +1. `session_id` field in the WebSocket message +2. 
`FOUNDRY_AGENT_SESSION_ID` environment variable +3. Auto-generated UUID + +### Distributed tracing (WebSocket) + +When tracing is enabled on the `AgentServerHost`, `invocations_ws` spans are automatically created with GenAI semantic conventions: + +- **Span name**: `invoke_agent {FOUNDRY_AGENT_NAME}:{FOUNDRY_AGENT_VERSION}` +- **Span attributes**: `gen_ai.system`, `gen_ai.operation.name`, `gen_ai.response.id`, `gen_ai.conversation.id`, `gen_ai.agent.id`, `gen_ai.agent.name`, `gen_ai.agent.version` +- **Error tags**: `azure.ai.agentserver.invocations_ws.error.code`, `.error.message` + +### WebSocket examples + +#### Simple agent + +```python +from azure.ai.agentserver.invocations import InvocationWSAgentServerHost, InvocationWSContext + +app = InvocationWSAgentServerHost() + + +@app.ws_invoke_handler +async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"greeting": f"Hello, {payload['name']}!"} + +app.run() +``` + +**Client** (using the `websockets` library): + +```python +import asyncio, json, websockets + +async def main(): + async with websockets.connect("ws://localhost:8088/invocations_ws/ws") as ws: + await ws.send(json.dumps({ + "action": "invoke", + "payload": {"name": "Alice"} + })) + while True: + msg = json.loads(await ws.recv()) + if msg["type"] == "ping": + await ws.send(json.dumps({"action": "pong"})) + elif msg["type"] == "result": + print(msg["payload"]["greeting"]) # Hello, Alice! 
+ break + +asyncio.run(main()) +``` + +#### Long-running operations with get/cancel + +```python +import asyncio + +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, + InvocationWSError, +) + +_tasks: dict[str, asyncio.Task] = {} +_results: dict[str, dict] = {} + +app = InvocationWSAgentServerHost() + + +@app.ws_invoke_handler +async def handle(payload: dict, context: InvocationWSContext) -> dict: + task = asyncio.create_task(do_work(context.invocation_id, payload)) + _tasks[context.invocation_id] = task + return {"invocation_id": context.invocation_id, "status": "running"} + + +@app.ws_get_invocation_handler +async def get_invocation(context: InvocationWSContext) -> dict: + if context.invocation_id in _results: + return _results[context.invocation_id] + if context.invocation_id in _tasks: + return {"invocation_id": context.invocation_id, "status": "running"} + raise InvocationWSError("not_found", "Invocation not found") + + +@app.ws_cancel_invocation_handler +async def cancel_invocation(context: InvocationWSContext) -> dict: + if context.invocation_id in _tasks: + _tasks[context.invocation_id].cancel() + del _tasks[context.invocation_id] + return {"invocation_id": context.invocation_id, "status": "cancelled"} + raise InvocationWSError("not_found", "Invocation not found") +``` + +#### Streaming + +Use an async generator to stream chunks back to the client. Each yielded dict is sent as a `stream_chunk` message, followed by a `stream_end` when the generator completes. 
+ +```python +from azure.ai.agentserver.invocations import InvocationWSAgentServerHost, InvocationWSContext + +app = InvocationWSAgentServerHost() + + +@app.ws_invoke_handler +async def handle(payload: dict, context: InvocationWSContext): + for word in ["Hello", " ", "world", "!"]: + yield {"delta": word} +``` + +#### Multi-turn conversation + +Use the `session_id` field to group invocations over the same WebSocket connection: + +```python +import asyncio, json, websockets + +async def main(): + async with websockets.connect("ws://localhost:8088/invocations_ws/ws") as ws: + # First turn + await ws.send(json.dumps({ + "action": "invoke", + "session_id": "session-abc", + "payload": {"message": "My name is Alice"}, + })) + print(json.loads(await ws.recv())) + + # Second turn (same session, same connection) + await ws.send(json.dumps({ + "action": "invoke", + "session_id": "session-abc", + "payload": {"message": "What is my name?"}, + })) + print(json.loads(await ws.recv())) + +asyncio.run(main()) +``` + +#### Combined HTTP + WebSocket host + +Use cooperative multiple inheritance to serve both `invocations` (HTTP) and `invocations_ws` (WebSocket) protocols on the same server: + +```python +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationWSAgentServerHost, + InvocationWSContext, +) +from starlette.requests import Request +from starlette.responses import JSONResponse, Response + + +class MyAgentHost(InvocationWSAgentServerHost, InvocationAgentServerHost): + pass + + +app = MyAgentHost() + + +@app.invoke_handler # HTTP — POST /invocations +async def handle_http(request: Request) -> Response: + data = await request.json() + return JSONResponse({"greeting": f"Hello, {data['name']}!"}) + + +@app.ws_invoke_handler # WebSocket — /invocations_ws/ws +async def handle_ws(payload: dict, context: InvocationWSContext) -> dict: + return {"greeting": f"Hello, {payload['name']}!"} + +app.run() +``` + ## Troubleshooting ### Reporting issues @@ 
-194,8 +444,9 @@ Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/ | Sample | Description | |---|---| -| [simple_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/) | Minimal synchronous request-response | -| [async_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/) | Long-running operations with polling and cancellation | +| [simple_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/simple_invoke_agent/) | Minimal synchronous request-response (HTTP) | +| [async_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/async_invoke_agent/) | Long-running operations with polling and cancellation (HTTP) | +| [streaming_ws_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/) | Streaming token-by-token echo over both WebSocket and HTTP (SSE) | ## Contributing diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py index 23e2b4d7dcbf..4d2c30227e52 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/__init__.py @@ -3,10 +3,10 @@ # --------------------------------------------------------- """Invocations protocol for Azure AI Hosted Agents. -This package provides an invocation protocol host as a subclass of +This package provides invocation protocol hosts as subclasses of :class:`~azure.ai.agentserver.core.AgentServerHost`. 
-Quick start:: +**HTTP protocol** (``invocations``):: from azure.ai.agentserver.invocations import InvocationAgentServerHost from starlette.responses import JSONResponse @@ -18,11 +18,29 @@ async def handle(request): return JSONResponse({"ok": True}) app.run() + +**WebSocket protocol** (``invocations_ws``):: + + from azure.ai.agentserver.invocations import InvocationWSAgentServerHost, InvocationWSContext + + app = InvocationWSAgentServerHost() + + @app.ws_invoke_handler + async def handle(payload, context): + return {"reply": "hello"} + + app.run() """ __path__ = __import__("pkgutil").extend_path(__path__, __name__) from ._invocation import InvocationAgentServerHost +from ._invocation_ws import InvocationWSAgentServerHost, InvocationWSContext, InvocationWSError from ._version import VERSION -__all__ = ["InvocationAgentServerHost"] +__all__ = [ + "InvocationAgentServerHost", + "InvocationWSAgentServerHost", + "InvocationWSContext", + "InvocationWSError", +] __version__ = VERSION diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py similarity index 60% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py rename to sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py index bab56b9a145d..5fd68f4c3c73 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_websocket.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py @@ -1,9 +1,9 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Websocket protocol host for Azure AI Hosted Agents (WebSocket). 
+"""WebSocket invocation protocol host for Azure AI Hosted Agents. -Provides the websocket protocol over WebSocket long connections +Provides the invocation protocol over WebSocket long connections as a :class:`~azure.ai.agentserver.core.AgentServerHost` subclass. """ import asyncio @@ -30,7 +30,7 @@ record_error, ) -from ._constants import WebsocketConstants +from ._ws_constants import InvocationWSConstants logger = logging.getLogger("azure.ai.agentserver") @@ -59,23 +59,23 @@ def _sanitize_id(value: str, fallback: str) -> str: @dataclass -class WebsocketContext: - """Contextual information for a websocket request. +class InvocationWSContext: + """Contextual information for a WebSocket invocation request. - Passed to handler functions registered via :meth:`invoke_handler`, - :meth:`get_websocket_handler`, and :meth:`cancel_websocket_handler`. + Passed to handler functions registered via :meth:`ws_invoke_handler`, + :meth:`ws_get_invocation_handler`, and :meth:`ws_cancel_invocation_handler`. - :param websocket_id: Unique identifier for this websocket. - :type websocket_id: str - :param session_id: Session identifier for this websocket. + :param invocation_id: Unique identifier for this invocation. + :type invocation_id: str + :param session_id: Session identifier for this invocation. :type session_id: str """ - websocket_id: str + invocation_id: str session_id: str -class WebsocketError(Exception): +class InvocationWSError(Exception): """Raised by handlers to signal a domain-specific error. :param code: Machine-readable error code. @@ -90,42 +90,42 @@ def __init__(self, code: str, message: str) -> None: super().__init__(message) -class WebsocketAgentServerHost(AgentServerHost): - """Websocket protocol host for Azure AI Hosted Agents over WebSocket. +class InvocationWSAgentServerHost(AgentServerHost): + """WebSocket invocation protocol host for Azure AI Hosted Agents. 
A :class:`~azure.ai.agentserver.core.AgentServerHost` subclass that adds - a WebSocket endpoint for the websocket protocol. Use the decorator + a WebSocket endpoint for the ``invocations_ws`` protocol. Use the decorator methods to wire handler functions to messages. - WebSocket endpoint: ``/websocket/ws`` + WebSocket endpoint: ``/invocations_ws/ws`` **Client → Server messages** (JSON text frames):: - {"action": "invoke", "websocket_id": "opt", "session_id": "opt", "payload": {...}} - {"action": "get_websocket", "websocket_id": "required"} - {"action": "cancel_websocket", "websocket_id": "required"} + {"action": "invoke", "invocation_id": "opt", "session_id": "opt", "payload": {...}} + {"action": "get_invocation", "invocation_id": "required"} + {"action": "cancel_invocation", "invocation_id": "required"} **Server → Client messages** (JSON text frames):: - {"type": "result", "websocket_id": "...", "session_id": "...", "payload": {...}} - {"type": "stream_chunk", "websocket_id": "...", "session_id": "...", "payload": {...}} - {"type": "stream_end", "websocket_id": "...", "session_id": "..."} - {"type": "error", "websocket_id": "...", "error": {"code": "...", "message": "..."}} + {"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} + {"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} + {"type": "stream_end", "invocation_id": "...", "session_id": "..."} + {"type": "error", "invocation_id": "...", "error": {"code": "...", "message": "..."}} Usage:: - from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext + from azure.ai.agentserver.invocations import InvocationWSAgentServerHost, InvocationWSContext - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler + @app.ws_invoke_handler async def handle(payload, context): return {"reply": "hello"} app.run() :param openapi_spec: Optional OpenAPI spec dict. 
When provided, the spec - is served at ``GET /websocket/docs/openapi.json``. + is served at ``GET /invocations_ws/docs/openapi.json``. :type openapi_spec: Optional[dict[str, Any]] :param ws_ping_interval: Interval in seconds between keep-alive ping frames sent to each connected WebSocket client. Keeps the @@ -142,45 +142,45 @@ def __init__( ws_ping_interval: Optional[int] = None, **kwargs: Any, ) -> None: - self._invoke_fn: Optional[Callable] = None - self._get_websocket_fn: Optional[Callable] = None - self._cancel_websocket_fn: Optional[Callable] = None - self._openapi_spec = openapi_spec + self._ws_invoke_fn: Optional[Callable] = None + self._ws_get_invocation_fn: Optional[Callable] = None + self._ws_cancel_invocation_fn: Optional[Callable] = None + self._ws_openapi_spec = openapi_spec self._ws_ping_interval: int = ( ws_ping_interval if ws_ping_interval is not None - else WebsocketConstants.DEFAULT_WS_PING_INTERVAL + else InvocationWSConstants.DEFAULT_WS_PING_INTERVAL ) - # Build websocket routes - websocket_routes: list[Any] = [ + # Build WebSocket routes + ws_routes: list[Any] = [ Route( - "/websocket/docs/openapi.json", - self._get_openapi_spec_endpoint, + "/invocations_ws/docs/openapi.json", + self._ws_get_openapi_spec_endpoint, methods=["GET"], - name="get_openapi_spec", + name="ws_get_openapi_spec", ), WebSocketRoute( - "/websocket/ws", + "/invocations_ws/ws", self._websocket_endpoint, - name="websocket_ws", + name="invocations_ws", ), ] # Merge with any routes from sibling mixins via cooperative init existing = list(kwargs.pop("routes", None) or []) - super().__init__(routes=existing + websocket_routes, **kwargs) + super().__init__(routes=existing + ws_routes, **kwargs) # ------------------------------------------------------------------ # Handler decorators # ------------------------------------------------------------------ - def invoke_handler( + def ws_invoke_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the 
invoke handler. + """Register a function as the WebSocket invoke handler. - The handler receives ``(payload: dict, context: WebsocketContext)`` + The handler receives ``(payload: dict, context: InvocationWSContext)`` and may be: - An async function returning a ``dict`` (non-streaming). @@ -188,12 +188,12 @@ def invoke_handler( Usage:: - @app.invoke_handler + @app.ws_invoke_handler async def handle(payload, context): return {"reply": f"echo: {payload}"} # Streaming variant: - @app.invoke_handler + @app.ws_invoke_handler async def handle(payload, context): for token in tokens: yield {"token": token} @@ -206,18 +206,18 @@ async def handle(payload, context): """ if not (inspect.iscoroutinefunction(fn) or inspect.isasyncgenfunction(fn)): raise TypeError( - f"invoke_handler expects an async function or async generator, got {type(fn).__name__}. " + f"ws_invoke_handler expects an async function or async generator, got {type(fn).__name__}. " "Use 'async def' to define your handler." ) - self._invoke_fn = fn + self._ws_invoke_fn = fn return fn - def get_websocket_handler( + def ws_get_invocation_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the get-websocket handler. + """Register a function as the WebSocket get-invocation handler. - The handler receives ``(context: WebsocketContext)`` and returns + The handler receives ``(context: InvocationWSContext)`` and returns a ``dict``. :param fn: Async function. @@ -228,18 +228,18 @@ def get_websocket_handler( """ if not inspect.iscoroutinefunction(fn): raise TypeError( - f"get_websocket_handler expects an async function, got {type(fn).__name__}. " + f"ws_get_invocation_handler expects an async function, got {type(fn).__name__}. " "Use 'async def' to define your handler." 
) - self._get_websocket_fn = fn + self._ws_get_invocation_fn = fn return fn - def cancel_websocket_handler( + def ws_cancel_invocation_handler( self, fn: Callable[..., Any] ) -> Callable[..., Any]: - """Register a function as the cancel-websocket handler. + """Register a function as the WebSocket cancel-invocation handler. - The handler receives ``(context: WebsocketContext)`` and returns + The handler receives ``(context: InvocationWSContext)`` and returns a ``dict``. :param fn: Async function. @@ -250,22 +250,22 @@ def cancel_websocket_handler( """ if not inspect.iscoroutinefunction(fn): raise TypeError( - f"cancel_websocket_handler expects an async function, got {type(fn).__name__}. " + f"ws_cancel_invocation_handler expects an async function, got {type(fn).__name__}. " "Use 'async def' to define your handler." ) - self._cancel_websocket_fn = fn + self._ws_cancel_invocation_fn = fn return fn # ------------------------------------------------------------------ # OpenAPI spec (HTTP endpoint — documentation) # ------------------------------------------------------------------ - def get_openapi_spec(self) -> Optional[dict[str, Any]]: - """Return the stored OpenAPI spec, or None.""" - return self._openapi_spec + def get_ws_openapi_spec(self) -> Optional[dict[str, Any]]: + """Return the stored WebSocket OpenAPI spec, or None.""" + return self._ws_openapi_spec - async def _get_openapi_spec_endpoint(self, request: Request) -> Response: # pylint: disable=unused-argument - spec = self.get_openapi_spec() + async def _ws_get_openapi_spec_endpoint(self, request: Request) -> Response: # pylint: disable=unused-argument + spec = self.get_ws_openapi_spec() if spec is None: return create_error_response("not_found", "No OpenAPI spec registered", status_code=404) return JSONResponse(spec) @@ -275,7 +275,7 @@ async def _get_openapi_spec_endpoint(self, request: Request) -> Response: # pyl # ------------------------------------------------------------------ @staticmethod - def 
_safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: + def _ws_safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: if span is None: return try: @@ -288,10 +288,10 @@ def _safe_set_attrs(span: Any, attrs: dict[str, str]) -> None: # Span context manager # ------------------------------------------------------------------ - def _request_span( + def _ws_request_span( self, headers: Any, - websocket_id: str, + invocation_id: str, span_operation: str, operation_name: Optional[str] = None, session_id: str = "", @@ -300,8 +300,8 @@ def _request_span( :param headers: HTTP/WebSocket handshake headers. :type headers: any - :param websocket_id: The request/websocket ID. - :type websocket_id: str + :param invocation_id: The request/invocation ID. + :type invocation_id: str :param span_operation: Span operation name. :type span_operation: str :param operation_name: Optional ``gen_ai.operation.name`` value. @@ -312,15 +312,15 @@ def _request_span( :rtype: any """ return self.request_span( - headers, websocket_id, span_operation, + headers, invocation_id, span_operation, operation_name=operation_name, session_id=session_id, end_on_exit=False, ) - def _simple_request_span( + def _ws_simple_request_span( self, headers: Any, - websocket_id: str, + invocation_id: str, span_operation: str, session_id: str = "", ) -> Any: @@ -330,8 +330,8 @@ def _simple_request_span( :param headers: HTTP/WebSocket handshake headers. :type headers: any - :param websocket_id: The request/websocket ID. - :type websocket_id: str + :param invocation_id: The request/invocation ID. + :type invocation_id: str :param span_operation: Span operation name. :type span_operation: str :param session_id: Session ID (empty string if absent). 
@@ -340,7 +340,7 @@ def _simple_request_span( :rtype: any """ return self.request_span( - headers, websocket_id, span_operation, + headers, invocation_id, span_operation, session_id=session_id, ) @@ -362,13 +362,13 @@ async def _ws_ping_loop(self, websocket: WebSocket) -> None: try: while True: await asyncio.sleep(self._ws_ping_interval) - await websocket.send_json({"type": WebsocketConstants.MSG_TYPE_PING}) + await websocket.send_json({"type": InvocationWSConstants.MSG_TYPE_PING}) except (WebSocketDisconnect, Exception): # pylint: disable=broad-exception-caught # Connection closed or errored — let the task exit silently. pass async def _websocket_endpoint(self, websocket: WebSocket) -> None: - """Main WebSocket endpoint for the websocket protocol. + """Main WebSocket endpoint for the invocations_ws protocol. Accepts a WebSocket connection and processes JSON messages in a loop. Each message must contain an ``action`` field. @@ -394,34 +394,34 @@ async def _websocket_endpoint(self, websocket: WebSocket) -> None: message = json.loads(raw) except (json.JSONDecodeError, ValueError): await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, + "type": InvocationWSConstants.MSG_TYPE_ERROR, "error": {"code": "invalid_json", "message": "Invalid JSON message"}, }) continue if not isinstance(message, dict): await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, + "type": InvocationWSConstants.MSG_TYPE_ERROR, "error": {"code": "invalid_message", "message": "Message must be a JSON object"}, }) continue action = message.get("action") - if action == WebsocketConstants.ACTION_INVOKE: + if action == InvocationWSConstants.ACTION_INVOKE: await self._handle_ws_invoke(websocket, message) - elif action == WebsocketConstants.ACTION_GET_WEBSOCKET: - await self._handle_ws_get_websocket(websocket, message) - elif action == WebsocketConstants.ACTION_CANCEL_WEBSOCKET: - await self._handle_ws_cancel_websocket(websocket, message) - elif action == 
WebsocketConstants.ACTION_PING: + elif action == InvocationWSConstants.ACTION_GET_INVOCATION: + await self._handle_ws_get_invocation(websocket, message) + elif action == InvocationWSConstants.ACTION_CANCEL_INVOCATION: + await self._handle_ws_cancel_invocation(websocket, message) + elif action == InvocationWSConstants.ACTION_PING: # Client-initiated ping — respond with pong. - await websocket.send_json({"type": WebsocketConstants.MSG_TYPE_PONG}) - elif action == WebsocketConstants.ACTION_PONG: + await websocket.send_json({"type": InvocationWSConstants.MSG_TYPE_PONG}) + elif action == InvocationWSConstants.ACTION_PONG: # Client pong response — no-op, already kept connection alive. pass else: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, + "type": InvocationWSConstants.MSG_TYPE_ERROR, "error": { "code": "invalid_action", "message": f"Unknown action: {action}", @@ -443,8 +443,8 @@ async def _websocket_endpoint(self, websocket: WebSocket) -> None: async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) -> None: generated_id = str(uuid.uuid4()) - raw_websocket_id = message.get("websocket_id") or "" - websocket_id = _sanitize_id(raw_websocket_id, generated_id) + raw_invocation_id = message.get("invocation_id") or "" + invocation_id = _sanitize_id(raw_invocation_id, generated_id) raw_session_id = ( message.get("session_id") @@ -453,78 +453,78 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) ) session_id = _sanitize_id(raw_session_id, str(uuid.uuid4())) - context = WebsocketContext(websocket_id=websocket_id, session_id=session_id) + context = InvocationWSContext(invocation_id=invocation_id, session_id=session_id) payload = message.get("payload", {}) - if self._invoke_fn is None: + if self._ws_invoke_fn is None: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": 
invocation_id, "session_id": session_id, "error": { "code": "not_implemented", - "message": "No invoke handler registered. Use the @app.invoke_handler decorator.", + "message": "No invoke handler registered. Use the @app.ws_invoke_handler decorator.", }, }) return - with self._request_span( - websocket.headers, websocket_id, "invoke_agent", + with self._ws_request_span( + websocket.headers, invocation_id, "invoke_agent", operation_name="invoke_agent", session_id=session_id, ) as otel_span: - self._safe_set_attrs(otel_span, { - WebsocketConstants.ATTR_SPAN_WEBSOCKET_ID: websocket_id, - WebsocketConstants.ATTR_SPAN_SESSION_ID: session_id, + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationWSConstants.ATTR_SPAN_SESSION_ID: session_id, }) try: - if inspect.isasyncgenfunction(self._invoke_fn): + if inspect.isasyncgenfunction(self._ws_invoke_fn): # Streaming response - async for chunk in self._invoke_fn(payload, context): + async for chunk in self._ws_invoke_fn(payload, context): await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_STREAM_CHUNK, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_STREAM_CHUNK, + "invocation_id": invocation_id, "session_id": session_id, "payload": chunk, }) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_STREAM_END, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_STREAM_END, + "invocation_id": invocation_id, "session_id": session_id, }) else: # Non-streaming response - result = await self._invoke_fn(payload, context) + result = await self._ws_invoke_fn(payload, context) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_RESULT, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_RESULT, + "invocation_id": invocation_id, "session_id": session_id, "payload": result, }) - except WebsocketError as exc: - self._safe_set_attrs(otel_span, { - 
WebsocketConstants.ATTR_SPAN_ERROR_CODE: exc.code, - WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, + except InvocationWSError as exc: + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_ERROR_CODE: exc.code, + InvocationWSConstants.ATTR_SPAN_ERROR_MESSAGE: exc.message, }) end_span(otel_span, exc=exc) - logger.error("Websocket %s failed: %s", websocket_id, exc) + logger.error("Invocation %s failed: %s", invocation_id, exc) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, "session_id": session_id, "error": {"code": exc.code, "message": exc.message}, }) return except Exception as exc: # pylint: disable=broad-exception-caught - self._safe_set_attrs(otel_span, { - WebsocketConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationWSConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) end_span(otel_span, exc=exc) - logger.error("Error processing websocket %s: %s", websocket_id, exc, exc_info=True) + logger.error("Error processing invocation %s: %s", invocation_id, exc, exc_info=True) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, "session_id": session_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) @@ -534,119 +534,119 @@ async def _handle_ws_invoke(self, websocket: WebSocket, message: dict[str, Any]) end_span(otel_span) # ------------------------------------------------------------------ - # Get-websocket handler + # Get-invocation handler # ------------------------------------------------------------------ - async def _handle_ws_get_websocket(self, websocket: WebSocket, message: 
dict[str, Any]) -> None: - websocket_id = message.get("websocket_id") or "" - if not websocket_id: + async def _handle_ws_get_invocation(self, websocket: WebSocket, message: dict[str, Any]) -> None: + invocation_id = message.get("invocation_id") or "" + if not invocation_id: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "error": {"code": "invalid_request", "message": "websocket_id is required"}, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", "message": "invocation_id is required"}, }) return session_id = message.get("session_id") or "" - context = WebsocketContext(websocket_id=websocket_id, session_id=session_id) + context = InvocationWSContext(invocation_id=invocation_id, session_id=session_id) - with self._simple_request_span( - websocket.headers, websocket_id, "get_websocket", + with self._ws_simple_request_span( + websocket.headers, invocation_id, "get_invocation", session_id=session_id, ) as otel_span: - self._safe_set_attrs(otel_span, { - WebsocketConstants.ATTR_SPAN_WEBSOCKET_ID: websocket_id, - WebsocketConstants.ATTR_SPAN_SESSION_ID: session_id, + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationWSConstants.ATTR_SPAN_SESSION_ID: session_id, }) - if self._get_websocket_fn is None: + if self._ws_get_invocation_fn is None: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, - "error": {"code": "not_found", "message": "get_websocket not implemented"}, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": "not_found", "message": "get_invocation not implemented"}, }) return try: - result = await self._get_websocket_fn(context) + result = await self._ws_get_invocation_fn(context) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_RESULT, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_RESULT, + 
"invocation_id": invocation_id, "payload": result, }) - except WebsocketError as exc: + except InvocationWSError as exc: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, "error": {"code": exc.code, "message": exc.message}, }) except Exception as exc: # pylint: disable=broad-exception-caught - self._safe_set_attrs(otel_span, { - WebsocketConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationWSConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) record_error(otel_span, exc) - logger.error("Error in get_websocket %s: %s", websocket_id, exc, exc_info=True) + logger.error("Error in get_invocation %s: %s", invocation_id, exc, exc_info=True) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) # ------------------------------------------------------------------ - # Cancel-websocket handler + # Cancel-invocation handler # ------------------------------------------------------------------ - async def _handle_ws_cancel_websocket(self, websocket: WebSocket, message: dict[str, Any]) -> None: - websocket_id = message.get("websocket_id") or "" - if not websocket_id: + async def _handle_ws_cancel_invocation(self, websocket: WebSocket, message: dict[str, Any]) -> None: + invocation_id = message.get("invocation_id") or "" + if not invocation_id: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "error": {"code": "invalid_request", "message": "websocket_id is required"}, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "error": {"code": "invalid_request", 
"message": "invocation_id is required"}, }) return session_id = message.get("session_id") or "" - context = WebsocketContext(websocket_id=websocket_id, session_id=session_id) + context = InvocationWSContext(invocation_id=invocation_id, session_id=session_id) - with self._simple_request_span( - websocket.headers, websocket_id, "cancel_websocket", + with self._ws_simple_request_span( + websocket.headers, invocation_id, "cancel_invocation", session_id=session_id, ) as otel_span: - self._safe_set_attrs(otel_span, { - WebsocketConstants.ATTR_SPAN_WEBSOCKET_ID: websocket_id, - WebsocketConstants.ATTR_SPAN_SESSION_ID: session_id, + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_INVOCATION_ID: invocation_id, + InvocationWSConstants.ATTR_SPAN_SESSION_ID: session_id, }) - if self._cancel_websocket_fn is None: + if self._ws_cancel_invocation_fn is None: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, - "error": {"code": "not_found", "message": "cancel_websocket not implemented"}, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, + "error": {"code": "not_found", "message": "cancel_invocation not implemented"}, }) return try: - result = await self._cancel_websocket_fn(context) + result = await self._ws_cancel_invocation_fn(context) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_RESULT, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_RESULT, + "invocation_id": invocation_id, "payload": result, }) - except WebsocketError as exc: + except InvocationWSError as exc: await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, "error": {"code": exc.code, "message": exc.message}, }) except Exception as exc: # pylint: disable=broad-exception-caught - self._safe_set_attrs(otel_span, { - 
WebsocketConstants.ATTR_SPAN_ERROR_CODE: "internal_error", - WebsocketConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), + self._ws_safe_set_attrs(otel_span, { + InvocationWSConstants.ATTR_SPAN_ERROR_CODE: "internal_error", + InvocationWSConstants.ATTR_SPAN_ERROR_MESSAGE: str(exc), }) record_error(otel_span, exc) - logger.error("Error in cancel_websocket %s: %s", websocket_id, exc, exc_info=True) + logger.error("Error in cancel_invocation %s: %s", invocation_id, exc, exc_info=True) await websocket.send_json({ - "type": WebsocketConstants.MSG_TYPE_ERROR, - "websocket_id": websocket_id, + "type": InvocationWSConstants.MSG_TYPE_ERROR, + "invocation_id": invocation_id, "error": {"code": "internal_error", "message": "Internal server error"}, }) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_ws_constants.py similarity index 53% rename from sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py rename to sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_ws_constants.py index 699427ccd507..c7473f530c05 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_constants.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_ws_constants.py @@ -3,10 +3,11 @@ # --------------------------------------------------------- -class WebsocketConstants: - """Websocket protocol constants. +class InvocationWSConstants: + """WebSocket invocation protocol constants. - Protocol-specific constants for the WebSocket websocket protocol. + Protocol-specific constants for the WebSocket invocation protocol + (``invocations_ws``). 
""" # WebSocket message types (server → client) @@ -19,8 +20,8 @@ class WebsocketConstants: # WebSocket actions (client → server) ACTION_INVOKE = "invoke" - ACTION_GET_WEBSOCKET = "get_websocket" - ACTION_CANCEL_WEBSOCKET = "cancel_websocket" + ACTION_GET_INVOCATION = "get_invocation" + ACTION_CANCEL_INVOCATION = "cancel_invocation" ACTION_PING = "ping" ACTION_PONG = "pong" @@ -28,7 +29,7 @@ class WebsocketConstants: DEFAULT_WS_PING_INTERVAL = 30 # seconds # Span attribute keys - ATTR_SPAN_WEBSOCKET_ID = "azure.ai.agentserver.websocket.websocket_id" - ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.websocket.session_id" - ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.websocket.error.code" - ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.websocket.error.message" + ATTR_SPAN_INVOCATION_ID = "azure.ai.agentserver.invocations_ws.invocation_id" + ATTR_SPAN_SESSION_ID = "azure.ai.agentserver.invocations_ws.session_id" + ATTR_SPAN_ERROR_CODE = "azure.ai.agentserver.invocations_ws.error.code" + ATTR_SPAN_ERROR_MESSAGE = "azure.ai.agentserver.invocations_ws.error.message" diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml index 7657fdf1df67..e2336af5a25e 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml +++ b/sdk/agentserver/azure-ai-agentserver-invocations/pyproject.toml @@ -18,7 +18,7 @@ classifiers = [ "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", ] -keywords = ["azure", "azure sdk", "agent", "agentserver", "invocations"] +keywords = ["azure", "azure sdk", "agent", "agentserver", "invocations", "websocket"] dependencies = [ "azure-ai-agentserver-core>=2.0.0b3", diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md similarity index 63% rename from 
sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md rename to sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md index 0d29025432dc..52a409d37f00 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/README.md +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md @@ -1,25 +1,26 @@ **IMPORTANT!** All samples and other resources made available in this GitHub repository ("samples") are designed to assist in accelerating development of agents, solutions, and agent workflows for various scenarios. Review all provided resources and carefully test output behavior in the context of your use case. AI responses may be inaccurate and AI actions should be monitored with human oversight. -# Echo Agent — Websocket (WebSocket) + Invocations (HTTP SSE) Streaming +# Echo Agent — InvocationsWS (WebSocket) + Invocations (HTTP SSE) Streaming -This sample demonstrates a minimal echo agent that combines [azure-ai-agentserver-websocket](https://pypi.org/project/azure-ai-agentserver-websocket/) (WebSocket) and [azure-ai-agentserver-invocations](https://pypi.org/project/azure-ai-agentserver-invocations/) (HTTP SSE) on a single server, streaming responses word-by-word. It supports **two communication modes**: +This sample demonstrates a minimal echo agent that combines the `invocations_ws` (WebSocket) and `invocations` (HTTP SSE) protocols on a single server, streaming responses word-by-word. It supports **two communication modes**: -- **WebSocket** — persistent connection at `ws://localhost:8088/websocket/ws` +- **WebSocket** — persistent connection at `ws://localhost:8088/invocations_ws/ws` - **HTTP SSE** — stateless POST at `http://localhost:8088/invocations` +Both are served by a single `EchoAgentHost` class that uses cooperative multiple inheritance to mix `InvocationWSAgentServerHost` and `InvocationAgentServerHost`. 
+ ## How It Works The agent receives user input and echoes it back with a `🔊 Echo:` prefix. Each word is streamed as a separate token chunk. -- **WebSocket mode** (Websocket protocol): tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. -- **HTTP SSE mode** (Invocations protocol): tokens are sent as `data:` lines per the Server-Sent Events spec, followed by an `event: done` signal. +- **WebSocket mode** (`invocations_ws` protocol): tokens are sent as `stream_chunk` messages, followed by a `stream_end` signal. +- **HTTP SSE mode** (`invocations` protocol): tokens are sent as `data:` lines per the Server-Sent Events spec, followed by an `event: done` signal. ## Running Locally ### Prerequisites - Python 3.10+ -- Azure CLI installed and authenticated (`az login`) ### Install Dependencies @@ -43,7 +44,7 @@ Using the `websockets` library: import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: + async with websockets.connect("ws://localhost:8088/invocations_ws/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"message": "Hello world!"} @@ -74,14 +75,20 @@ curl -N -X POST http://localhost:8088/invocations \ ### Browser Client -A browser-based client with a WebSocket/HTTP mode switcher is available under `../browser_client/`: +A browser-based client with a WebSocket/HTTP mode switcher is included under `browser_client/`: ```bash -cd ../browser_client +cd browser_client python client.py ``` -Then open `http://localhost:8080` and use the toggle to switch between WebSocket and HTTP (SSE) modes. +Then open `http://localhost:8080` in your browser and use the toggle to switch between WebSocket and HTTP (SSE) modes. 
+ +To use a different port: + +```bash +python client.py --port 3000 +``` ## Deploying to Microsoft Foundry diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/agent.yaml similarity index 79% rename from sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml rename to sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/agent.yaml index 0eb9468b3c8d..7a57dd600eb0 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.yaml +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/agent.yaml @@ -1,7 +1,7 @@ kind: hosted name: echo-agent-streaming protocols: - - protocol: websocket + - protocol: invocations_ws version: v0.0.1 resources: cpu: "0.25" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/client.py b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/browser_client/client.py similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/client.py rename to sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/browser_client/client.py diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/browser_client/index.html similarity index 98% rename from sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html rename to sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/browser_client/index.html index 577654a44c95..3f08060ac647 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/browser_client/index.html +++ 
b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/browser_client/index.html @@ -49,7 +49,7 @@

Echo Agent Client

- + Disconnected @@ -99,8 +99,8 @@

Echo Agent Client

setInputEnabled(true); addMessage("Switched to HTTP (SSE) mode", "system"); } else { - urlInput.value = "ws://localhost:8088/websocket/ws"; - urlInput.placeholder = "ws://host:port/websocket/ws"; + urlInput.value = "ws://localhost:8088/invocations_ws/ws"; + urlInput.placeholder = "ws://host:port/invocations_ws/ws"; connectBtn.style.display = ""; disconnectBtn.style.display = ""; setStatus("disconnected"); diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/main.py similarity index 68% rename from sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py rename to sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/main.py index e9bca70b5cdb..c794e4c2efe7 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/main.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/main.py @@ -5,7 +5,7 @@ Echoes user input back as a stream, sending each word as a separate token chunk. 
Supports two communication modes: -- **WebSocket** at ``ws://localhost:8088/websocket/ws`` +- **WebSocket** at ``ws://localhost:8088/invocations_ws/ws`` - **HTTP SSE** at ``POST http://localhost:8088/invocations`` **Server** (this file):: @@ -17,7 +17,7 @@ import asyncio, json, websockets async def main(): - async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: + async with websockets.connect("ws://localhost:8088/invocations_ws/ws") as ws: await ws.send(json.dumps({ "action": "invoke", "payload": {"message": "Hello world!"} @@ -36,12 +36,10 @@ async def main(): asyncio.run(main()) **HTTP SSE client** (using ``curl``):: - - curl -N -X POST http://localhost:8088/invocations \\ - -H "Content-Type: application/json" \\ - -d '{"message": "Hello world!"}' + curl -N -X POST http://localhost:8088/invocations -H "Content-Type: application/json" -d '{"message": "Hello world!"}' """ + import asyncio import json from collections.abc import AsyncGenerator @@ -50,8 +48,11 @@ async def main(): from starlette.requests import Request from starlette.responses import Response, StreamingResponse -from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext -from azure.ai.agentserver.invocations import InvocationAgentServerHost +from azure.ai.agentserver.invocations import ( + InvocationAgentServerHost, + InvocationWSAgentServerHost, + InvocationWSContext, +) ECHO_PREFIX = "🔊 Echo: " @@ -69,35 +70,19 @@ async def echo_tokens(message: str) -> AsyncGenerator[dict, None]: await asyncio.sleep(0.1) # simulate token-by-token latency -# ---------------------------------------------------------------------------InvocationAgentServerHost -# Combined host — Websocket (WebSocket) + Invocations (HTTP/SSE) +# --------------------------------------------------------------------------- +# Combined host — InvocationsWS (WebSocket) + Invocations (HTTP/SSE) # --------------------------------------------------------------------------- -class 
EchoAgentHost(WebsocketAgentServerHost, InvocationAgentServerHost): - """Combined host supporting both Invocations (HTTP/SSE) and Websocket (WebSocket). +class EchoAgentHost(InvocationWSAgentServerHost, InvocationAgentServerHost): + """Combined host supporting both Invocations (HTTP/SSE) and InvocationsWS (WebSocket). - Both parent classes store their handler in ``_invoke_fn`` with incompatible - signatures, so we keep a separate ``_http_invoke_fn`` for the invocations - endpoint and override ``_dispatch_invoke`` to use it. + Both parent classes are composed via cooperative inheritance. The HTTP + invocations handler is registered with ``@app.invoke_handler`` and the + WebSocket handler with ``@app.ws_invoke_handler``. """ - - def __init__(self, **kwargs: object) -> None: - self._http_invoke_fn = None - super().__init__(**kwargs) - - def http_invoke_handler(self, fn): # type: ignore[override] - """Register the HTTP invocation handler.""" - self._http_invoke_fn = fn - return fn - - async def _dispatch_invoke(self, request: Request) -> Response: - """Route HTTP invocations to the dedicated HTTP handler.""" - if self._http_invoke_fn is not None: - return await self._http_invoke_fn(request) - raise NotImplementedError( - "No HTTP invoke handler registered. Use @app.http_invoke_handler." 
- ) + pass app = EchoAgentHost() @@ -110,7 +95,7 @@ async def _dispatch_invoke(self, request: Request) -> Response: # --------------------------------------------------------------------------- -# HTTP SSE invocation endpoint (via azure-ai-agentserver-invocations) +# HTTP SSE invocation endpoint (via InvocationAgentServerHost) # --------------------------------------------------------------------------- @@ -129,7 +114,7 @@ async def _generate_sse(message: str, invocation_id: str) -> AsyncGenerator[byte yield f"event: done\ndata: {done_payload}\n\n".encode() -@app.http_invoke_handler +@app.invoke_handler async def handle_http_invoke(request: Request) -> Response: """HTTP invocation endpoint — streams tokens as Server-Sent Events. @@ -148,20 +133,20 @@ async def handle_http_invoke(request: Request) -> Response: # --------------------------------------------------------------------------- -# WebSocket websocket endpoint (via azure-ai-agentserver-websocket) +# WebSocket invocation endpoint (via InvocationWSAgentServerHost) # --------------------------------------------------------------------------- -@app.invoke_handler -async def handle_invoke( - payload: dict, context: WebsocketContext # pylint: disable=unused-argument +@app.ws_invoke_handler +async def handle_ws_invoke( + payload: dict, context: InvocationWSContext # pylint: disable=unused-argument ) -> AsyncGenerator[dict, None]: """WebSocket streaming handler — each chunk is a ``stream_chunk`` message. :param payload: The client request payload. :type payload: dict - :param context: Websocket context with IDs. - :type context: WebsocketContext + :param context: Invocation context with IDs. + :type context: InvocationWSContext """ message = payload.get( "message", "Hello! 
Send me a message and I'll echo it back.") diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/requirements.txt new file mode 100644 index 000000000000..fe489ac8ac35 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/requirements.txt @@ -0,0 +1,2 @@ +azure-ai-agentserver-invocations +websockets diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py index 8a3deb55c72f..4576297cda2a 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py @@ -13,6 +13,22 @@ from azure.ai.agentserver.invocations import InvocationAgentServerHost +# Re-export WebSocket (invocations_ws) fixtures so they are auto-discovered +# by pytest for tests under this directory. +from conftest_ws import ( # noqa: F401 (re-exported fixtures) + WS_SAMPLE_OPENAPI_SPEC, + ws_async_storage_app, + ws_async_storage_client, + ws_echo_app, + ws_echo_client, + ws_failing_app, + ws_failing_client, + ws_no_spec_client, + ws_streaming_app, + ws_streaming_client, + ws_validated_client, +) + def pytest_configure(config): config.addinivalue_line("markers", "tracing_e2e: end-to-end tracing tests against live Application Insights") diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest_ws.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest_ws.py new file mode 100644 index 000000000000..8e26ff24d508 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest_ws.py @@ -0,0 +1,192 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Shared fixtures and factory functions for WebSocket invocation tests.""" +from typing import Any + +import pytest +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, + InvocationWSError, +) + + +# --------------------------------------------------------------------------- +# Sample OpenAPI spec used by several tests +# --------------------------------------------------------------------------- + +WS_SAMPLE_OPENAPI_SPEC: dict[str, Any] = { + "openapi": "3.0.0", + "info": {"title": "Echo Agent", "version": "1.0.0"}, + "paths": { + "/invocations_ws": { + "post": { + "requestBody": { + "required": True, + "content": { + "application/json": { + "schema": { + "type": "object", + "required": ["message"], + "properties": { + "message": {"type": "string"}, + }, + } + } + }, + }, + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "reply": {"type": "string"}, + }, + } + } + }, + } + }, + } + } + }, +} + + +# --------------------------------------------------------------------------- +# Factory functions +# --------------------------------------------------------------------------- + + +def _make_ws_echo_agent(**kwargs: Any) -> InvocationWSAgentServerHost: + """Create an InvocationWSAgentServerHost whose invoke handler echoes the payload.""" + app = InvocationWSAgentServerHost(**kwargs) + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"echo": payload, "invocation_id": context.invocation_id} + + return app + + +def _make_ws_streaming_agent(**kwargs: Any) -> InvocationWSAgentServerHost: + """Create an InvocationWSAgentServerHost whose invoke handler yields 3 JSON chunks.""" + app = InvocationWSAgentServerHost(**kwargs) + + @app.ws_invoke_handler + async def 
handle(payload: dict, context: InvocationWSContext): + for i in range(3): + yield {"chunk": i} + + return app + + +def _make_ws_async_storage_agent(**kwargs: Any) -> InvocationWSAgentServerHost: + """Create an InvocationWSAgentServerHost with get/cancel handlers and in-memory store.""" + app = InvocationWSAgentServerHost(**kwargs) + store: dict[str, dict] = {} + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + store[context.invocation_id] = payload + return {"stored": True, "invocation_id": context.invocation_id} + + @app.ws_get_invocation_handler + async def get_handler(context: InvocationWSContext) -> dict: + if context.invocation_id not in store: + raise InvocationWSError("not_found", "Not found") + return {"data": store[context.invocation_id]} + + @app.ws_cancel_invocation_handler + async def cancel_handler(context: InvocationWSContext) -> dict: + if context.invocation_id not in store: + raise InvocationWSError("not_found", "Not found") + del store[context.invocation_id] + return {"status": "cancelled"} + + return app + + +def _make_ws_validated_agent() -> InvocationWSAgentServerHost: + """Create an InvocationWSAgentServerHost with OpenAPI spec.""" + app = InvocationWSAgentServerHost(openapi_spec=WS_SAMPLE_OPENAPI_SPEC) + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"reply": f"echo: {payload['message']}"} + + return app + + +def _make_ws_failing_agent(**kwargs: Any) -> InvocationWSAgentServerHost: + """Create an InvocationWSAgentServerHost whose handler raises ValueError.""" + app = InvocationWSAgentServerHost(**kwargs) + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + raise ValueError("something went wrong") + + return app + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + 
+@pytest.fixture() +def ws_echo_app(): + return _make_ws_echo_agent() + + +@pytest.fixture() +def ws_echo_client(ws_echo_app): + return TestClient(ws_echo_app) + + +@pytest.fixture() +def ws_streaming_app(): + return _make_ws_streaming_agent() + + +@pytest.fixture() +def ws_streaming_client(ws_streaming_app): + return TestClient(ws_streaming_app) + + +@pytest.fixture() +def ws_async_storage_app(): + return _make_ws_async_storage_agent() + + +@pytest.fixture() +def ws_async_storage_client(ws_async_storage_app): + return TestClient(ws_async_storage_app) + + +@pytest.fixture() +def ws_validated_client(): + app = _make_ws_validated_agent() + return TestClient(app) + + +@pytest.fixture() +def ws_no_spec_client(): + app = _make_ws_echo_agent() + return TestClient(app) + + +@pytest.fixture() +def ws_failing_app(): + return _make_ws_failing_agent() + + +@pytest.fixture() +def ws_failing_client(ws_failing_app): + return TestClient(ws_failing_app) diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_decorator_pattern.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_decorator_pattern.py new file mode 100644 index 000000000000..8366db498ed5 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_decorator_pattern.py @@ -0,0 +1,209 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +"""Tests for decorator-based handler registration on InvocationWSAgentServerHost.""" +from starlette.testclient import TestClient + +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, + InvocationWSError, +) + + +# --------------------------------------------------------------------------- +# ws_invoke_handler stores function +# --------------------------------------------------------------------------- + +def test_ws_invoke_handler_stores_function(): + """@app.ws_invoke_handler stores the function on the protocol object.""" + app = InvocationWSAgentServerHost() + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"ok": True} + + assert app._ws_invoke_fn is handle + + +# --------------------------------------------------------------------------- +# ws_invoke_handler returns original function +# --------------------------------------------------------------------------- + +def test_ws_invoke_handler_returns_original_function(): + """@app.ws_invoke_handler returns the original function.""" + app = InvocationWSAgentServerHost() + + async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"ok": True} + + result = app.ws_invoke_handler(handle) + assert result is handle + + +# --------------------------------------------------------------------------- +# ws_get_invocation_handler stores function +# --------------------------------------------------------------------------- + +def test_ws_get_invocation_handler_stores_function(): + """@app.ws_get_invocation_handler stores the function.""" + app = InvocationWSAgentServerHost() + + @app.ws_get_invocation_handler + async def get_handler(context: InvocationWSContext) -> dict: + return {"ok": True} + + assert app._ws_get_invocation_fn is get_handler + + +# --------------------------------------------------------------------------- +# 
ws_cancel_invocation_handler stores function +# --------------------------------------------------------------------------- + +def test_ws_cancel_invocation_handler_stores_function(): + """@app.ws_cancel_invocation_handler stores the function.""" + app = InvocationWSAgentServerHost() + + @app.ws_cancel_invocation_handler + async def cancel_handler(context: InvocationWSContext) -> dict: + return {"ok": True} + + assert app._ws_cancel_invocation_fn is cancel_handler + + +# --------------------------------------------------------------------------- +# shutdown_handler stores function +# --------------------------------------------------------------------------- + +def test_ws_shutdown_handler_stores_function(): + """@server.shutdown_handler stores the function on the server.""" + app = InvocationWSAgentServerHost() + + @app.shutdown_handler + async def on_shutdown(): + pass + + assert app._shutdown_fn is on_shutdown + + +# --------------------------------------------------------------------------- +# Full request flow +# --------------------------------------------------------------------------- + +def test_ws_full_request_flow(): + """Full lifecycle: invoke → get → cancel → get (not_found).""" + app = InvocationWSAgentServerHost() + store: dict[str, dict] = {} + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + store[context.invocation_id] = payload + return {"stored": True} + + @app.ws_get_invocation_handler + async def get_handler(context: InvocationWSContext) -> dict: + if context.invocation_id not in store: + raise InvocationWSError("not_found", "Not found") + return {"data": store[context.invocation_id]} + + @app.ws_cancel_invocation_handler + async def cancel_handler(context: InvocationWSContext) -> dict: + if context.invocation_id not in store: + raise InvocationWSError("not_found", "Not found") + del store[context.invocation_id] + return {"status": "cancelled"} + + client = TestClient(app) + with 
client.websocket_connect("/invocations_ws/ws") as ws: + # Invoke + ws.send_json({"action": "invoke", "payload": {"key": "lifecycle-test"}}) + invoke_resp = ws.receive_json() + assert invoke_resp["type"] == "result" + inv_id = invoke_resp["invocation_id"] + + # Get + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp = ws.receive_json() + assert get_resp["type"] == "result" + assert get_resp["payload"]["data"]["key"] == "lifecycle-test" + + # Cancel + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) + cancel_resp = ws.receive_json() + assert cancel_resp["type"] == "result" + assert cancel_resp["payload"]["status"] == "cancelled" + + # Get after cancel + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) + get_resp2 = ws.receive_json() + assert get_resp2["type"] == "error" + assert get_resp2["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Missing optional handlers +# --------------------------------------------------------------------------- + +def test_ws_missing_invoke_handler_returns_error(): + """Invoke without registered handler returns not_implemented error.""" + app = InvocationWSAgentServerHost() + client = TestClient(app) + with client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "invoke", "payload": {}}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_implemented" + + +def test_ws_missing_get_handler_returns_error(): + """get_invocation without registered handler returns not_found error.""" + app = InvocationWSAgentServerHost() + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert 
resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +def test_ws_missing_cancel_handler_returns_error(): + """cancel_invocation without registered handler returns not_found error.""" + app = InvocationWSAgentServerHost() + + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + return {"ok": True} + + client = TestClient(app) + with client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) + resp = ws.receive_json() + assert resp["type"] == "error" + assert resp["error"]["code"] == "not_found" + + +# --------------------------------------------------------------------------- +# Optional handler defaults and overrides +# --------------------------------------------------------------------------- + +def test_ws_optional_handlers_default_none(): + """Get and cancel handlers default to None.""" + app = InvocationWSAgentServerHost() + assert app._ws_get_invocation_fn is None + assert app._ws_cancel_invocation_fn is None + + +def test_ws_optional_handler_override(): + """Setting an optional handler replaces None.""" + app = InvocationWSAgentServerHost() + + @app.ws_get_invocation_handler + async def get_handler(context: InvocationWSContext) -> dict: + return {"ok": True} + + assert app._ws_get_invocation_fn is not None diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_edge_cases.py similarity index 56% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_edge_cases.py index 37e0e5109682..6413690bb375 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_edge_cases.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_edge_cases.py @@ -1,14 +1,14 @@ # 
--------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Edge-case tests for WebsocketAgentServerHost over WebSocket.""" +"""Edge-case tests for InvocationWSAgentServerHost over WebSocket.""" import uuid from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) @@ -16,9 +16,9 @@ # Unknown action # --------------------------------------------------------------------------- -def test_unknown_action_returns_error(echo_client): +def test_ws_unknown_action_returns_error(ws_echo_client): """Sending an unknown action returns an error.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "unknown_action", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -29,9 +29,9 @@ def test_unknown_action_returns_error(echo_client): # Invalid JSON # --------------------------------------------------------------------------- -def test_invalid_json_returns_error(echo_client): +def test_ws_invalid_json_returns_error(ws_echo_client): """Sending invalid JSON returns an error but connection stays open.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_text("not valid json {{{") resp = ws.receive_json() assert resp["type"] == "error" @@ -43,9 +43,9 @@ def test_invalid_json_returns_error(echo_client): assert resp2["type"] == "result" -def test_non_object_json_returns_error(echo_client): +def test_ws_non_object_json_returns_error(ws_echo_client): """Sending a JSON array instead of object returns an error.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with 
ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_text("[1, 2, 3]") resp = ws.receive_json() assert resp["type"] == "error" @@ -53,33 +53,33 @@ def test_non_object_json_returns_error(echo_client): # --------------------------------------------------------------------------- -# Websocket ID handling +# Invocation ID handling # --------------------------------------------------------------------------- -def test_websocket_id_auto_generated(echo_client): - """Websocket ID is auto-generated when not provided.""" - with echo_client.websocket_connect("/websocket/ws") as ws: +def test_ws_invocation_id_auto_generated(ws_echo_client): + """Invocation ID is auto-generated when not provided.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "websocket_id" in resp - uuid.UUID(resp["websocket_id"]) + assert "invocation_id" in resp + uuid.UUID(resp["invocation_id"]) -def test_websocket_id_accepted_from_message(echo_client): - """Server accepts websocket ID from message field.""" +def test_ws_invocation_id_accepted_from_message(ws_echo_client): + """Server accepts invocation ID from message field.""" custom_id = str(uuid.uuid4()) - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "invoke", "websocket_id": custom_id, "payload": {}}) + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "invoke", "invocation_id": custom_id, "payload": {}}) resp = ws.receive_json() - assert resp["websocket_id"] == custom_id + assert resp["invocation_id"] == custom_id -def test_websocket_id_generated_when_empty(echo_client): - """When empty websocket ID is sent, server generates one.""" - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "invoke", "websocket_id": "", "payload": {}}) +def test_ws_invocation_id_generated_when_empty(ws_echo_client): + """When empty 
invocation ID is sent, server generates one.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "invoke", "invocation_id": "", "payload": {}}) resp = ws.receive_json() - inv_id = resp["websocket_id"] + inv_id = resp["invocation_id"] uuid.UUID(inv_id) @@ -87,20 +87,20 @@ def test_websocket_id_generated_when_empty(echo_client): # Payload edge cases # --------------------------------------------------------------------------- -def test_large_payload(echo_client): +def test_ws_large_payload(ws_echo_client): """Large payload (dict with big value) is handled correctly.""" big_value = "x" * (1024 * 1024) - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"data": big_value}}) resp = ws.receive_json() assert resp["type"] == "result" assert len(resp["payload"]["echo"]["data"]) == 1024 * 1024 -def test_unicode_payload(echo_client): +def test_ws_unicode_payload(ws_echo_client): """Unicode payload is preserved.""" text = "Hello, 世界! 
🌍" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"text": text}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -111,122 +111,122 @@ def test_unicode_payload(echo_client): # Streaming edge cases # --------------------------------------------------------------------------- -def test_empty_streaming(): +def test_ws_empty_streaming(): """Empty streaming response (no chunks) sends only stream_end.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext): + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext): return yield # noqa: E501 — make it a generator client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "stream_end" -def test_streaming_has_websocket_id(): - """Streaming messages include websocket_id.""" - app = WebsocketAgentServerHost() +def test_ws_streaming_has_invocation_id(): + """Streaming messages include invocation_id.""" + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext): + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext): yield {"chunk": "data"} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "websocket_id" in resp + assert "invocation_id" in resp # --------------------------------------------------------------------------- -# Websocket lifecycle +# Invocation lifecycle # 
--------------------------------------------------------------------------- -def test_multiple_gets(async_storage_client): - """Multiple gets for the same websocket return the same result.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: +def test_ws_multiple_gets(ws_async_storage_client): + """Multiple gets for the same invocation return the same result.""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "multi-get"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] for _ in range(3): - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert get_resp["payload"]["data"]["key"] == "multi-get" -def test_double_cancel(async_storage_client): +def test_ws_double_cancel(ws_async_storage_client): """Cancelling twice: second cancel returns error.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "cancel-twice"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) cancel1 = ws.receive_json() assert cancel1["type"] == "result" - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) cancel2 = ws.receive_json() assert cancel2["type"] == "error" assert cancel2["error"]["code"] == "not_found" -def test_invoke_cancel_get(async_storage_client): +def test_ws_invoke_cancel_get(ws_async_storage_client): """Invoke -> cancel -> get 
returns not_found error.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "icg"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) ws.receive_json() # consume cancel response - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "error" assert get_resp["error"]["code"] == "not_found" # --------------------------------------------------------------------------- -# Multiple sequential websocket calls on same connection +# Multiple sequential invocation calls on same connection # --------------------------------------------------------------------------- -def test_multiple_sequential_websocket_calls(echo_client): - """Multiple sequential websocket calls on the same WebSocket connection.""" - with echo_client.websocket_connect("/websocket/ws") as ws: +def test_ws_multiple_sequential_invocation_calls(ws_echo_client): + """Multiple sequential invocation calls on the same WebSocket connection.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ids = set() for i in range(10): ws.send_json({"action": "invoke", "payload": {"idx": i}}) resp = ws.receive_json() assert resp["type"] == "result" assert resp["payload"]["echo"]["idx"] == i - ids.add(resp["websocket_id"]) + ids.add(resp["invocation_id"]) assert len(ids) == 10 # --------------------------------------------------------------------------- -# get/cancel without websocket_id +# get/cancel without invocation_id # --------------------------------------------------------------------------- -def 
test_get_without_websocket_id(echo_client): - """get_websocket without websocket_id returns error.""" - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "get_websocket"}) +def test_ws_get_without_invocation_id(ws_echo_client): + """get_invocation without invocation_id returns error.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "get_invocation"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "invalid_request" -def test_cancel_without_websocket_id(echo_client): - """cancel_websocket without websocket_id returns error.""" - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "cancel_websocket"}) +def test_ws_cancel_without_invocation_id(ws_echo_client): + """cancel_invocation without invocation_id returns error.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "cancel_invocation"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "invalid_request" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_get_cancel.py similarity index 50% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_get_cancel.py index b8629a340810..f8eebcef29c3 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_get_cancel.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_get_cancel.py @@ -1,13 +1,13 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for get_websocket and cancel_websocket actions over WebSocket.""" +"""Tests for get_invocation and cancel_invocation actions over WebSocket.""" from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, - WebsocketError, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, + InvocationWSError, ) @@ -15,14 +15,14 @@ # GET after invoke # --------------------------------------------------------------------------- -def test_get_after_invoke_returns_stored_result(async_storage_client): - """get_websocket after invoke returns the stored result.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: +def test_ws_get_after_invoke_returns_stored_result(ws_async_storage_client): + """get_invocation after invoke returns the stored result.""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "stored-data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" @@ -33,10 +33,10 @@ def test_get_after_invoke_returns_stored_result(async_storage_client): # GET unknown ID # --------------------------------------------------------------------------- -def test_get_unknown_id_returns_error(async_storage_client): - """get_websocket with unknown ID returns error.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "get_websocket", "websocket_id": "unknown-id-12345"}) +def test_ws_get_unknown_id_returns_error(ws_async_storage_client): + """get_invocation with unknown ID returns error.""" + with 
ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "unknown-id-12345"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -46,14 +46,14 @@ def test_get_unknown_id_returns_error(async_storage_client): # Cancel after invoke # --------------------------------------------------------------------------- -def test_cancel_after_invoke_returns_cancelled(async_storage_client): - """cancel_websocket after invoke returns cancelled status.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: +def test_ws_cancel_after_invoke_returns_cancelled(ws_async_storage_client): + """cancel_invocation after invoke returns cancelled status.""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "cancel-me"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" @@ -64,10 +64,10 @@ def test_cancel_after_invoke_returns_cancelled(async_storage_client): # Cancel unknown ID # --------------------------------------------------------------------------- -def test_cancel_unknown_id_returns_error(async_storage_client): - """cancel_websocket with unknown ID returns error.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "cancel_websocket", "websocket_id": "unknown-id-12345"}) +def test_ws_cancel_unknown_id_returns_error(ws_async_storage_client): + """cancel_invocation with unknown ID returns error.""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": 
"unknown-id-12345"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -77,17 +77,17 @@ def test_cancel_unknown_id_returns_error(async_storage_client): # GET after cancel # --------------------------------------------------------------------------- -def test_get_after_cancel_returns_error(async_storage_client): - """get_websocket after cancel returns error (data removed).""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: +def test_ws_get_after_cancel_returns_error(ws_async_storage_client): + """get_invocation after cancel returns error (data removed).""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "temp"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) ws.receive_json() # consume cancel response - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "error" @@ -98,21 +98,21 @@ def test_get_after_cancel_returns_error(async_storage_client): # GET error returns internal_error # --------------------------------------------------------------------------- -def test_get_websocket_error_returns_internal_error(): - """get_websocket handler raising an exception returns internal_error.""" - app = WebsocketAgentServerHost() +def test_ws_get_invocation_error_returns_internal_error(): + """get_invocation handler raising an exception returns internal_error.""" + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> 
dict: return {"ok": True} - @app.get_websocket_handler - async def get_handler(context: WebsocketContext) -> dict: + @app.ws_get_invocation_handler + async def get_handler(context: InvocationWSContext) -> dict: raise RuntimeError("get failed") client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "get_websocket", "websocket_id": "some-id"}) + with client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "internal_error" @@ -122,21 +122,21 @@ async def get_handler(context: WebsocketContext) -> dict: # Cancel error returns internal_error # --------------------------------------------------------------------------- -def test_cancel_websocket_error_returns_internal_error(): - """cancel_websocket handler raising an exception returns internal_error.""" - app = WebsocketAgentServerHost() +def test_ws_cancel_invocation_error_returns_internal_error(): + """cancel_invocation handler raising an exception returns internal_error.""" + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} - @app.cancel_websocket_handler - async def cancel_handler(context: WebsocketContext) -> dict: + @app.ws_cancel_invocation_handler + async def cancel_handler(context: InvocationWSContext) -> dict: raise RuntimeError("cancel failed") client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "cancel_websocket", "websocket_id": "some-id"}) + with client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert 
resp["error"]["code"] == "internal_error" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py similarity index 74% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py index c67f9f4315fc..c3f8412c0a57 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_graceful_shutdown.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py @@ -1,16 +1,16 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for graceful shutdown with WebsocketAgentServerHost.""" +"""Tests for graceful shutdown with InvocationWSAgentServerHost.""" import asyncio import logging import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) @@ -18,13 +18,13 @@ # Helpers # --------------------------------------------------------------------------- -def _make_server_with_shutdown(**kwargs) -> tuple[WebsocketAgentServerHost, list]: - """Create WebsocketAgentServerHost with a tracked shutdown handler.""" - server = WebsocketAgentServerHost(**kwargs) +def _make_server_with_shutdown(**kwargs) -> tuple[InvocationWSAgentServerHost, list]: + """Create InvocationWSAgentServerHost with a tracked shutdown handler.""" + server = InvocationWSAgentServerHost(**kwargs) calls: list[str] = [] - @server.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @server.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> 
dict: return {"ok": True} @server.shutdown_handler @@ -38,18 +38,18 @@ async def on_shutdown(): # Shutdown handler registration # --------------------------------------------------------------------------- -def test_shutdown_handler_registered(): +def test_ws_shutdown_handler_registered(): """Shutdown handler is stored on the server.""" server, _ = _make_server_with_shutdown() assert server._shutdown_fn is not None -def test_shutdown_handler_not_registered(): +def test_ws_shutdown_handler_not_registered(): """Without @shutdown_handler, _shutdown_fn is None.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} assert app._shutdown_fn is None @@ -85,7 +85,7 @@ async def send(message): # --------------------------------------------------------------------------- @pytest.mark.asyncio -async def test_shutdown_handler_called_on_lifespan_exit(): +async def test_ws_shutdown_handler_called_on_lifespan_exit(): """Shutdown handler runs when the ASGI lifespan exits.""" server, calls = _make_server_with_shutdown() completed = await _drive_lifespan(server) @@ -98,13 +98,13 @@ async def test_shutdown_handler_called_on_lifespan_exit(): # --------------------------------------------------------------------------- @pytest.mark.asyncio -async def test_shutdown_handler_timeout(caplog): +async def test_ws_shutdown_handler_timeout(caplog): """Shutdown handler that exceeds timeout is warned about.""" - server = WebsocketAgentServerHost(graceful_shutdown_timeout=1) + server = InvocationWSAgentServerHost(graceful_shutdown_timeout=1) calls: list[str] = [] - @server.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @server.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": 
True} @server.shutdown_handler @@ -124,12 +124,12 @@ async def on_shutdown(): # --------------------------------------------------------------------------- @pytest.mark.asyncio -async def test_shutdown_handler_exception(caplog): +async def test_ws_shutdown_handler_exception(caplog): """Shutdown handler that raises is caught and logged.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} @app.shutdown_handler @@ -146,21 +146,21 @@ async def on_shutdown(): # Graceful shutdown timeout config # --------------------------------------------------------------------------- -def test_default_graceful_shutdown_timeout(): +def test_ws_default_graceful_shutdown_timeout(): """Default graceful shutdown timeout is 30 seconds.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() assert app._graceful_shutdown_timeout == 30 -def test_custom_graceful_shutdown_timeout(): +def test_ws_custom_graceful_shutdown_timeout(): """Custom graceful_shutdown_timeout is stored.""" - server = WebsocketAgentServerHost(graceful_shutdown_timeout=60) + server = InvocationWSAgentServerHost(graceful_shutdown_timeout=60) assert server._graceful_shutdown_timeout == 60 -def test_zero_graceful_shutdown_timeout(): +def test_ws_zero_graceful_shutdown_timeout(): """Zero timeout disables the drain period.""" - server = WebsocketAgentServerHost(graceful_shutdown_timeout=0) + server = InvocationWSAgentServerHost(graceful_shutdown_timeout=0) assert server._graceful_shutdown_timeout == 0 @@ -168,7 +168,7 @@ def test_zero_graceful_shutdown_timeout(): # Health endpoint accessible during normal operation # --------------------------------------------------------------------------- -def test_health_endpoint_during_operation(): +def 
test_ws_health_endpoint_during_operation(): """GET /readiness returns 200 during normal operation.""" server, _ = _make_server_with_shutdown() client = TestClient(server) @@ -181,16 +181,16 @@ def test_health_endpoint_during_operation(): # No shutdown handler is no-op # --------------------------------------------------------------------------- -def test_no_shutdown_handler_is_noop(): +def test_ws_no_shutdown_handler_is_noop(): """Without a shutdown handler, WebSocket and lifespan work fine.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -200,11 +200,11 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # Multiple requests before shutdown # --------------------------------------------------------------------------- -def test_multiple_requests_before_shutdown(): +def test_ws_multiple_requests_before_shutdown(): """Multiple requests can be served on the same WebSocket connection.""" server, _ = _make_server_with_shutdown() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: for i in range(5): ws.send_json({"action": "invoke", "payload": {"idx": i}}) resp = ws.receive_json() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_invoke.py similarity index 58% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py rename to 
sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_invoke.py index f3606e3e98c4..50a3958b86ea 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_invoke.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_invoke.py @@ -1,19 +1,19 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for the invoke action over WebSocket.""" +"""Tests for the invoke action over WebSocket (invocations_ws protocol).""" import uuid -from azure.ai.agentserver.websocket import WebsocketContext +from azure.ai.agentserver.invocations import InvocationWSContext # --------------------------------------------------------------------------- # Echo payload # --------------------------------------------------------------------------- -def test_invoke_echo_payload(echo_client): +def test_ws_invoke_echo_payload(ws_echo_client): """Invoke echoes the payload back.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"msg": "hello world"}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -24,51 +24,51 @@ def test_invoke_echo_payload(echo_client): # IDs # --------------------------------------------------------------------------- -def test_invoke_returns_websocket_id(echo_client): - """Response includes a valid UUID websocket_id.""" - with echo_client.websocket_connect("/websocket/ws") as ws: +def test_ws_invoke_returns_invocation_id(ws_echo_client): + """Response includes a valid UUID invocation_id.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "websocket_id" in resp - uuid.UUID(resp["websocket_id"]) + assert "invocation_id" in resp + uuid.UUID(resp["invocation_id"]) -def 
test_invoke_returns_session_id(echo_client): +def test_ws_invoke_returns_session_id(ws_echo_client): """Response includes a valid UUID session_id.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "session_id" in resp uuid.UUID(resp["session_id"]) -def test_invoke_unique_websocket_ids(echo_client): - """Each invoke gets a unique websocket ID.""" +def test_ws_invoke_unique_invocation_ids(ws_echo_client): + """Each invoke gets a unique invocation ID.""" ids = set() - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: for _ in range(5): ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - ids.add(resp["websocket_id"]) + ids.add(resp["invocation_id"]) assert len(ids) == 5 -def test_invoke_accepts_custom_websocket_id(echo_client): - """If the message includes websocket_id, the server uses it.""" +def test_ws_invoke_accepts_custom_invocation_id(ws_echo_client): + """If the message includes invocation_id, the server uses it.""" custom_id = str(uuid.uuid4()) - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "invoke", "websocket_id": custom_id, "payload": {}}) + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "invoke", "invocation_id": custom_id, "payload": {}}) resp = ws.receive_json() - assert resp["websocket_id"] == custom_id + assert resp["invocation_id"] == custom_id # --------------------------------------------------------------------------- # Streaming # --------------------------------------------------------------------------- -def test_streaming_returns_chunks(streaming_client): +def test_ws_streaming_returns_chunks(ws_streaming_client): """Streaming handler yields 3 chunks then stream_end.""" - with 
streaming_client.websocket_connect("/websocket/ws") as ws: + with ws_streaming_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) chunks = [] while True: @@ -82,22 +82,22 @@ def test_streaming_returns_chunks(streaming_client): assert chunk == {"chunk": i} -def test_streaming_has_websocket_id(streaming_client): - """Streaming messages include websocket_id.""" - with streaming_client.websocket_connect("/websocket/ws") as ws: +def test_ws_streaming_has_invocation_id(ws_streaming_client): + """Streaming messages include invocation_id.""" + with ws_streaming_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "websocket_id" in resp - uuid.UUID(resp["websocket_id"]) + assert "invocation_id" in resp + uuid.UUID(resp["invocation_id"]) # --------------------------------------------------------------------------- # Empty payload # --------------------------------------------------------------------------- -def test_invoke_empty_payload(echo_client): +def test_ws_invoke_empty_payload(ws_echo_client): """Empty payload doesn't crash the server.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -107,9 +107,9 @@ def test_invoke_empty_payload(echo_client): # Error handling # --------------------------------------------------------------------------- -def test_invoke_error_returns_error(failing_client): +def test_ws_invoke_error_returns_error(ws_failing_client): """Handler exception returns error message.""" - with failing_client.websocket_connect("/websocket/ws") as ws: + with ws_failing_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -117,17 
+117,17 @@ def test_invoke_error_returns_error(failing_client): assert resp["error"]["message"] == "Internal server error" -def test_invoke_error_has_websocket_id(failing_client): - """Error response still includes websocket_id.""" - with failing_client.websocket_connect("/websocket/ws") as ws: +def test_ws_invoke_error_has_invocation_id(ws_failing_client): + """Error response still includes invocation_id.""" + with ws_failing_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - assert "websocket_id" in resp + assert "invocation_id" in resp -def test_error_hides_details_by_default(failing_client): +def test_ws_error_hides_details_by_default(ws_failing_client): """Exception message is hidden in error responses.""" - with failing_client.websocket_connect("/websocket/ws") as ws: + with ws_failing_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "something went wrong" not in resp["error"]["message"] diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_keepalive.py similarity index 71% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_keepalive.py index 18714b2a50e4..a725c9ca1e53 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_ws_keepalive.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_keepalive.py @@ -1,20 +1,20 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for WebSocket ping/pong keep-alive.""" +"""Tests for WebSocket ping/pong keep-alive (invocations_ws protocol).""" from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) def _make_echo_app(**kwargs): - app = WebsocketAgentServerHost(**kwargs) + app = InvocationWSAgentServerHost(**kwargs) - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"echo": payload} return app @@ -24,19 +24,19 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # Client-initiated ping → server responds with pong # --------------------------------------------------------------------------- -def test_client_ping_gets_pong(): +def test_ws_client_ping_gets_pong(): """Server replies with pong when client sends a ping action.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "ping"}) resp = ws.receive_json() assert resp["type"] == "pong" -def test_client_ping_does_not_interrupt_invoke(): - """A ping/pong exchange between websocket calls doesn't break the connection.""" +def test_ws_client_ping_does_not_interrupt_invoke(): + """A ping/pong exchange between invocation calls doesn't break the connection.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: # Normal invoke ws.send_json({"action": "invoke", "payload": {"n": 1}}) r1 = ws.receive_json() @@ -54,10 +54,10 @@ def test_client_ping_does_not_interrupt_invoke(): assert r2["payload"]["echo"]["n"] == 2 
-def test_client_pong_is_accepted_silently(): +def test_ws_client_pong_is_accepted_silently(): """Server accepts pong action without returning an error.""" client = TestClient(_make_echo_app()) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "pong"}) # No response expected for pong — verify next invoke still works. ws.send_json({"action": "invoke", "payload": {"ok": True}}) @@ -69,21 +69,21 @@ def test_client_pong_is_accepted_silently(): # ws_ping_interval=0 disables server-side pings # --------------------------------------------------------------------------- -def test_ping_disabled_with_zero_interval(): +def test_ws_ping_disabled_with_zero_interval(): """Setting ws_ping_interval=0 disables the background ping task.""" app = _make_echo_app(ws_ping_interval=0) client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" -def test_custom_ping_interval(): +def test_ws_custom_ping_interval(): """A custom ws_ping_interval is accepted without error.""" app = _make_echo_app(ws_ping_interval=15) client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_multimodal_protocol.py similarity index 76% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_multimodal_protocol.py index 7f0aff0eb992..0bd38af2d097 100644 --- 
a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_multimodal_protocol.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_multimodal_protocol.py @@ -1,14 +1,14 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for varied payloads with WebsocketAgentServerHost over WebSocket.""" +"""Tests for varied payloads with InvocationWSAgentServerHost over WebSocket.""" import base64 from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) @@ -16,12 +16,12 @@ # Helper: echo agent with content type tracking # --------------------------------------------------------------------------- -def _make_content_type_echo_agent() -> WebsocketAgentServerHost: +def _make_content_type_echo_agent() -> InvocationWSAgentServerHost: """Agent that echoes payload and notes the content_type field.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return { "echo": payload, "received_content_type": payload.get("content_type", "unknown"), @@ -30,12 +30,12 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: return app -def _make_sse_agent() -> WebsocketAgentServerHost: +def _make_sse_agent() -> InvocationWSAgentServerHost: """Agent that returns streaming chunks.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext): + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext): for i in 
range(3): yield {"event": i} @@ -46,12 +46,12 @@ async def handle(payload: dict, context: WebsocketContext): # Various content types (base64-encoded binary data in JSON) # --------------------------------------------------------------------------- -def test_png_payload(): +def test_ws_png_payload(): """PNG content type payload is accepted and echoed.""" server = _make_content_type_echo_agent() client = TestClient(server) fake_png = b"\x89PNG\r\n\x1a\n" + b"\x00" * 100 - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -65,12 +65,12 @@ def test_png_payload(): assert base64.b64decode(resp["payload"]["echo"]["data_base64"]) == fake_png -def test_jpeg_payload(): +def test_ws_jpeg_payload(): """JPEG content type payload is accepted.""" server = _make_content_type_echo_agent() client = TestClient(server) fake_jpeg = b"\xff\xd8\xff\xe0" + b"\x00" * 100 - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -83,12 +83,12 @@ def test_jpeg_payload(): assert resp["payload"]["received_content_type"] == "image/jpeg" -def test_wav_payload(): +def test_ws_wav_payload(): """WAV audio content type payload is accepted.""" server = _make_content_type_echo_agent() client = TestClient(server) fake_wav = b"RIFF" + b"\x00" * 100 - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -101,11 +101,11 @@ def test_wav_payload(): assert resp["payload"]["received_content_type"] == "audio/wav" -def test_text_plain_payload(): +def test_ws_text_plain_payload(): """text/plain content type payload is accepted.""" server = _make_content_type_echo_agent() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with 
client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({ "action": "invoke", "payload": { @@ -122,16 +122,16 @@ def test_text_plain_payload(): # Query-like parameters in payload # --------------------------------------------------------------------------- -def test_params_in_payload(): +def test_ws_params_in_payload(): """Arbitrary parameters are accessible in the handler payload.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"name": payload.get("name", "unknown")} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"name": "Alice"}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -142,11 +142,11 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # Streaming # --------------------------------------------------------------------------- -def test_streaming_chunks(): +def test_ws_streaming_chunks(): """Streaming response sends multiple chunks.""" server = _make_sse_agent() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) chunks = [] while True: @@ -164,12 +164,12 @@ def test_streaming_chunks(): # Health endpoint # --------------------------------------------------------------------------- -def test_health_endpoint_returns_200(): +def test_ws_health_endpoint_returns_200(): """GET /readiness returns 200 with healthy status.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def 
handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_request_limits.py similarity index 64% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_request_limits.py index 3695258f7567..2f46ef531613 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_request_limits.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_request_limits.py @@ -7,37 +7,37 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) # --------------------------------------------------------------------------- -# WebsocketAgentServerHost no longer accepts request_timeout +# InvocationWSAgentServerHost no longer accepts request_timeout # --------------------------------------------------------------------------- -def test_no_request_timeout_parameter(): - """WebsocketAgentServerHost no longer accepts request_timeout.""" +def test_ws_no_request_timeout_parameter(): + """InvocationWSAgentServerHost no longer accepts request_timeout.""" with pytest.raises(TypeError): - WebsocketAgentServerHost(request_timeout=10) + InvocationWSAgentServerHost(request_timeout=10) # --------------------------------------------------------------------------- # Slow invoke completes without timeout # --------------------------------------------------------------------------- -def test_slow_invoke_completes(): +def test_ws_slow_invoke_completes(): """Without timeout, handler runs to completion.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - 
@app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: await asyncio.sleep(0.1) return {"status": "done"} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_server_routes.py similarity index 52% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_server_routes.py index 2fc9a571db4d..10316ef0a4a0 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_server_routes.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_server_routes.py @@ -1,41 +1,41 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. 
# --------------------------------------------------------- -"""Tests for basic server route registration with WebsocketAgentServerHost.""" +"""Tests for basic server route registration with InvocationWSAgentServerHost.""" import uuid from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) -from conftest import SAMPLE_OPENAPI_SPEC +from conftest_ws import WS_SAMPLE_OPENAPI_SPEC # --------------------------------------------------------------------------- -# WebSocket connection /websocket/ws +# WebSocket connection /invocations_ws/ws # --------------------------------------------------------------------------- -def test_websocket_invoke_returns_result(echo_client): +def test_ws_invoke_returns_result(ws_echo_client): """Invoke via WebSocket returns a result.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"test": True}}) resp = ws.receive_json() assert resp["type"] == "result" # --------------------------------------------------------------------------- -# Websocket ID is valid UUID +# Invocation ID is valid UUID # --------------------------------------------------------------------------- -def test_invoke_returns_uuid_websocket_id(echo_client): - """Invoke returns a valid UUID websocket ID.""" - with echo_client.websocket_connect("/websocket/ws") as ws: +def test_ws_invoke_returns_uuid_invocation_id(ws_echo_client): + """Invoke returns a valid UUID invocation ID.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() - inv_id = resp["websocket_id"] + inv_id = resp["invocation_id"] parsed = uuid.UUID(inv_id) assert str(parsed) == inv_id @@ -44,9 +44,9 @@ def 
test_invoke_returns_uuid_websocket_id(echo_client): # GET openapi spec returns 404 when not set # --------------------------------------------------------------------------- -def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): - """GET /websocket/docs/openapi.json returns 404 when no spec registered.""" - resp = no_spec_client.get("/websocket/docs/openapi.json") +def test_ws_get_openapi_spec_returns_404_when_not_set(ws_no_spec_client): + """GET /invocations_ws/docs/openapi.json returns 404 when no spec registered.""" + resp = ws_no_spec_client.get("/invocations_ws/docs/openapi.json") assert resp.status_code == 404 @@ -54,28 +54,28 @@ def test_get_openapi_spec_returns_404_when_not_set(no_spec_client): # GET openapi spec returns spec when registered # --------------------------------------------------------------------------- -def test_get_openapi_spec_returns_spec_when_registered(): - """GET /websocket/docs/openapi.json returns the spec when registered.""" - app = WebsocketAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) +def test_ws_get_openapi_spec_returns_spec_when_registered(): + """GET /invocations_ws/docs/openapi.json returns the spec when registered.""" + app = InvocationWSAgentServerHost(openapi_spec=WS_SAMPLE_OPENAPI_SPEC) - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) - resp = client.get("/websocket/docs/openapi.json") + resp = client.get("/invocations_ws/docs/openapi.json") assert resp.status_code == 200 - assert resp.json() == SAMPLE_OPENAPI_SPEC + assert resp.json() == WS_SAMPLE_OPENAPI_SPEC # --------------------------------------------------------------------------- -# get_websocket returns not_found error by default +# get_invocation returns not_found error by default # --------------------------------------------------------------------------- -def 
test_get_websocket_returns_not_found_default(echo_client): - """get_websocket without handler returns not_found error.""" - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "get_websocket", "websocket_id": "some-id"}) +def test_ws_get_invocation_returns_not_found_default(ws_echo_client): + """get_invocation without handler returns not_found error.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "get_invocation", "invocation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -85,10 +85,10 @@ def test_get_websocket_returns_not_found_default(echo_client): # cancel returns not_found error by default # --------------------------------------------------------------------------- -def test_cancel_websocket_returns_not_found_default(echo_client): - """cancel_websocket without handler returns not_found error.""" - with echo_client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "cancel_websocket", "websocket_id": "some-id"}) +def test_ws_cancel_invocation_returns_not_found_default(ws_echo_client): + """cancel_invocation without handler returns not_found error.""" + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: + ws.send_json({"action": "cancel_invocation", "invocation_id": "some-id"}) resp = ws.receive_json() assert resp["type"] == "error" assert resp["error"]["code"] == "not_found" @@ -98,7 +98,7 @@ def test_cancel_websocket_returns_not_found_default(echo_client): # Unknown HTTP route returns 404 # --------------------------------------------------------------------------- -def test_unknown_route_returns_404(echo_client): +def test_ws_unknown_route_returns_404(ws_echo_client): """Unknown route returns 404.""" - resp = echo_client.get("/nonexistent") + resp = ws_echo_client.get("/nonexistent") assert resp.status_code == 404 diff --git 
a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_session_id.py similarity index 61% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_session_id.py index 1b3422727538..3a2e47fdf14a 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_session_id.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_session_id.py @@ -1,16 +1,16 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for session ID resolution over WebSocket.""" +"""Tests for session ID resolution over WebSocket (invocations_ws protocol).""" import os import uuid from unittest.mock import patch from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) @@ -18,9 +18,9 @@ # Invoke response has session_id # --------------------------------------------------------------------------- -def test_invoke_has_session_id(echo_client): +def test_ws_invoke_has_session_id(ws_echo_client): """Invoke response includes session_id.""" - with echo_client.websocket_connect("/websocket/ws") as ws: + with ws_echo_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert "session_id" in resp @@ -31,16 +31,16 @@ def test_invoke_has_session_id(echo_client): # Invoke with session_id in message uses that value # --------------------------------------------------------------------------- -def test_invoke_with_session_id_in_message(): +def test_ws_invoke_with_session_id_in_message(): """Invoke with session_id in 
message uses that value.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({ "action": "invoke", "session_id": "my-custom-session", @@ -54,51 +54,51 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # Invoke with env var # --------------------------------------------------------------------------- -def test_invoke_uses_env_var(): +def test_ws_invoke_uses_env_var(): """Invoke uses FOUNDRY_AGENT_SESSION_ID env var when no session_id in message.""" - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) with patch.dict(os.environ, {"FOUNDRY_AGENT_SESSION_ID": "env-session"}): - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["session_id"] == "env-session" # --------------------------------------------------------------------------- -# get_websocket does NOT include session_id (not part of get protocol) +# get_invocation does NOT include session_id (not part of get protocol) # --------------------------------------------------------------------------- -def test_get_websocket_no_session_id(async_storage_client): - """get_websocket response does not include session_id.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: +def 
test_ws_get_invocation_no_session_id(ws_async_storage_client): + """get_invocation response does not include session_id.""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) get_resp = ws.receive_json() assert get_resp["type"] == "result" assert "session_id" not in get_resp # --------------------------------------------------------------------------- -# cancel_websocket does NOT include session_id +# cancel_invocation does NOT include session_id # --------------------------------------------------------------------------- -def test_cancel_websocket_no_session_id(async_storage_client): - """cancel_websocket response does not include session_id.""" - with async_storage_client.websocket_connect("/websocket/ws") as ws: +def test_ws_cancel_invocation_no_session_id(ws_async_storage_client): + """cancel_invocation response does not include session_id.""" + with ws_async_storage_client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) cancel_resp = ws.receive_json() assert cancel_resp["type"] == "result" assert "session_id" not in cancel_resp diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py similarity index 81% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py rename to 
sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py index fbf0aecf9de3..dbf2a774340a 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_span_parenting.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py @@ -2,7 +2,8 @@ # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- """Tests that the invoke_agent span is set as the current span in context, -so that child spans created by framework handlers are correctly parented. +so that child spans created by framework handlers are correctly parented +(WebSocket invocations_ws protocol). """ import os from unittest.mock import patch @@ -10,9 +11,9 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, ) @@ -30,7 +31,11 @@ _EXPORTER = InMemorySpanExporter() _PROVIDER = SdkTracerProvider() _PROVIDER.add_span_processor(SimpleSpanProcessor(_EXPORTER)) - trace.set_tracer_provider(_PROVIDER) + existing = trace.get_tracer_provider() + if isinstance(existing, SdkTracerProvider) and existing is not _PROVIDER: + existing.add_span_processor(SimpleSpanProcessor(_EXPORTER)) + else: + trace.set_tracer_provider(_PROVIDER) else: _EXPORTER = None @@ -51,11 +56,11 @@ def _make_server_with_child_span(): """Server whose handler creates a child span (simulating a framework).""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() child_tracer = trace.get_tracer("test.framework") - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + 
async def handle(payload: dict, context: InvocationWSContext) -> dict: with child_tracer.start_as_current_span("framework_invoke_agent") as _span: return {"ok": True} @@ -66,11 +71,11 @@ def _make_streaming_server_with_child_span(): """Server with streaming response whose handler creates a child span.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() child_tracer = trace.get_tracer("test.framework") - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext): + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext): with child_tracer.start_as_current_span("framework_invoke_agent"): yield {"chunk": "data"} @@ -97,12 +102,12 @@ def _assert_child_parented(spans, streaming=False): ) -def test_framework_span_is_child_of_invoke_span(): +def test_ws_framework_span_is_child_of_invoke_span(): """A span created inside the handler should be a child of the agentserver invoke_agent span, not a sibling.""" server = _make_server_with_child_span() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) resp = ws.receive_json() assert resp["type"] == "result" @@ -110,11 +115,11 @@ def test_framework_span_is_child_of_invoke_span(): _assert_child_parented(_get_spans(), streaming=False) -def test_framework_span_is_child_streaming(): +def test_ws_framework_span_is_child_streaming(): """Same parent-child relationship holds for streaming responses.""" server = _make_streaming_server_with_child_span() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) 
while True: resp = ws.receive_json() diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py similarity index 73% rename from sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py rename to sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py index 341dd719cc57..99c1b42742d7 100644 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_tracing.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py @@ -1,7 +1,7 @@ # --------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # --------------------------------------------------------- -"""Tests for OpenTelemetry tracing in the WebSocket websocket protocol.""" +"""Tests for OpenTelemetry tracing in the WebSocket invocations_ws protocol.""" import os import uuid from unittest.mock import patch @@ -9,10 +9,10 @@ import pytest from starlette.testclient import TestClient -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, - WebsocketError, +from azure.ai.agentserver.invocations import ( + InvocationWSAgentServerHost, + InvocationWSContext, + InvocationWSError, ) @@ -34,9 +34,6 @@ _MODULE_EXPORTER = InMemorySpanExporter() _MODULE_PROVIDER = SdkTracerProvider() _MODULE_PROVIDER.add_span_processor(SimpleSpanProcessor(_MODULE_EXPORTER)) - # If a provider was already set (e.g. by test_span_parenting), add our - # exporter to the existing provider as well, so we capture spans regardless - # of module import order. 
existing = trace.get_tracer_provider() if isinstance(existing, SdkTracerProvider) and existing is not _MODULE_PROVIDER: existing.add_span_processor(SimpleSpanProcessor(_MODULE_EXPORTER)) @@ -68,13 +65,13 @@ def _get_spans(): # --------------------------------------------------------------------------- def _make_tracing_server(**kwargs): - """Create an WebsocketAgentServerHost with tracing enabled.""" + """Create an InvocationWSAgentServerHost with tracing enabled.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = WebsocketAgentServerHost(**kwargs) + server = InvocationWSAgentServerHost(**kwargs) - @server.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @server.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"echo": payload} return server @@ -84,27 +81,27 @@ def _make_tracing_server_with_get_cancel(**kwargs): """Create a tracing-enabled server with get/cancel handlers.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = WebsocketAgentServerHost(**kwargs) + server = InvocationWSAgentServerHost(**kwargs) store: dict[str, dict] = {} - @server.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - store[context.websocket_id] = payload + @server.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: + store[context.invocation_id] = payload return {"stored": True} - @server.get_websocket_handler - async def get_handler(context: WebsocketContext) -> dict: - if context.websocket_id in store: - return {"data": store[context.websocket_id]} - raise WebsocketError("not_found", "Not found") + @server.ws_get_invocation_handler + async 
def get_handler(context: InvocationWSContext) -> dict: + if context.invocation_id in store: + return {"data": store[context.invocation_id]} + raise InvocationWSError("not_found", "Not found") - @server.cancel_websocket_handler - async def cancel_handler(context: WebsocketContext) -> dict: - if context.websocket_id in store: - del store[context.websocket_id] + @server.ws_cancel_invocation_handler + async def cancel_handler(context: InvocationWSContext) -> dict: + if context.invocation_id in store: + del store[context.invocation_id] return {"status": "cancelled"} - raise WebsocketError("not_found", "Not found") + raise InvocationWSError("not_found", "Not found") return server @@ -113,10 +110,10 @@ def _make_failing_tracing_server(**kwargs): """Create a tracing-enabled server whose handler raises.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = WebsocketAgentServerHost(**kwargs) + server = InvocationWSAgentServerHost(**kwargs) - @server.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @server.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: raise ValueError("tracing error test") return server @@ -126,10 +123,10 @@ def _make_streaming_tracing_server(**kwargs): """Create a tracing-enabled server with streaming response.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - server = WebsocketAgentServerHost(**kwargs) + server = InvocationWSAgentServerHost(**kwargs) - @server.invoke_handler - async def handle(payload: dict, context: WebsocketContext): + @server.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext): yield {"chunk": 1} yield {"chunk": 2} @@ -140,19 +137,19 @@ async def 
handle(payload: dict, context: WebsocketContext): # Tracing disabled by default # --------------------------------------------------------------------------- -def test_tracing_disabled_by_default(): +def test_ws_tracing_disabled_by_default(): """No spans are created when tracing is not enabled.""" if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -165,11 +162,11 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # Tracing enabled creates invoke span # --------------------------------------------------------------------------- -def test_tracing_enabled_creates_invoke_span(): +def test_ws_tracing_enabled_creates_invoke_span(): """Tracing enabled creates a span named 'invoke_agent'.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -183,11 +180,11 @@ def test_tracing_enabled_creates_invoke_span(): # Invoke error records exception # --------------------------------------------------------------------------- -def test_invoke_error_records_exception(): +def test_ws_invoke_error_records_exception(): """When handler raises, the span records the exception.""" server = _make_failing_tracing_server() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", 
"payload": {}}) resp = ws.receive_json() assert resp["type"] == "error" @@ -203,37 +200,37 @@ def test_invoke_error_records_exception(): # GET/cancel create spans # --------------------------------------------------------------------------- -def test_get_websocket_creates_span(): - """get_websocket creates a span.""" +def test_ws_get_invocation_creates_span(): + """get_invocation creates a span.""" server = _make_tracing_server_with_get_cancel() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "get_invocation", "invocation_id": inv_id}) ws.receive_json() spans = _get_spans() - get_spans = [s for s in spans if "get_websocket" in s.name] + get_spans = [s for s in spans if "get_invocation" in s.name] assert len(get_spans) >= 1 -def test_cancel_websocket_creates_span(): - """cancel_websocket creates a span.""" +def test_ws_cancel_invocation_creates_span(): + """cancel_invocation creates a span.""" server = _make_tracing_server_with_get_cancel() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {"key": "data"}}) invoke_resp = ws.receive_json() - inv_id = invoke_resp["websocket_id"] + inv_id = invoke_resp["invocation_id"] - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) + ws.send_json({"action": "cancel_invocation", "invocation_id": inv_id}) ws.receive_json() spans = _get_spans() - cancel_spans = [s for s in spans if "cancel_websocket" in s.name] + cancel_spans = [s for s in spans if "cancel_invocation" in s.name] assert len(cancel_spans) >= 1 @@ -241,18 
+238,18 @@ def test_cancel_websocket_creates_span(): # Tracing via env var # --------------------------------------------------------------------------- -def test_tracing_via_appinsights_env_var(): +def test_ws_tracing_via_appinsights_env_var(): """Tracing is enabled when APPLICATIONINSIGHTS_CONNECTION_STRING is set.""" with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -265,23 +262,23 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # No tracing when no endpoints configured # --------------------------------------------------------------------------- -def test_no_tracing_when_no_endpoints(): +def test_ws_no_tracing_when_no_endpoints(): """Tracing is disabled when no connection string or OTLP endpoint is set.""" env = os.environ.copy() env.pop("APPLICATIONINSIGHTS_CONNECTION_STRING", None) env.pop("OTEL_EXPORTER_OTLP_ENDPOINT", None) with patch.dict(os.environ, env, clear=True): - app = WebsocketAgentServerHost() + app = InvocationWSAgentServerHost() - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: + @app.ws_invoke_handler + async def handle(payload: dict, context: InvocationWSContext) -> dict: return {"ok": True} if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: + with 
client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -294,11 +291,11 @@ async def handle(payload: dict, context: WebsocketContext) -> dict: # Streaming spans # --------------------------------------------------------------------------- -def test_streaming_creates_span(): +def test_ws_streaming_creates_span(): """Streaming response creates and completes a span.""" server = _make_streaming_tracing_server() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) # Consume all streaming messages while True: @@ -315,11 +312,11 @@ def test_streaming_creates_span(): # GenAI attributes on invoke span # --------------------------------------------------------------------------- -def test_genai_attributes_on_invoke_span(): +def test_ws_genai_attributes_on_invoke_span(): """Invoke span has GenAI semantic convention attributes.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() @@ -337,11 +334,11 @@ def test_genai_attributes_on_invoke_span(): # Session ID in gen_ai.conversation.id # --------------------------------------------------------------------------- -def test_session_id_in_websocket_id(): +def test_ws_session_id_in_invocation_id(): """Session ID is set as gen_ai.conversation.id on invoke span.""" server = _make_tracing_server() client = TestClient(server) - with client.websocket_connect("/websocket/ws") as ws: + with client.websocket_connect("/invocations_ws/ws") as ws: ws.send_json({ "action": "invoke", "session_id": "test-session", diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md b/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md 
deleted file mode 100644 index 11cc5ef64e21..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/CHANGELOG.md +++ /dev/null @@ -1,15 +0,0 @@ -# Release History - -## 1.0.0b1 (Unreleased) - -### Features Added - -- Initial release of `azure-ai-agentserver-websocket`. -- `WebsocketHandler` for wiring websocket protocol endpoints to an `AgentHost`. -- Decorator-based handler registration (`@websocket.invoke_handler`). -- Optional `GET /websocket/{id}` and `POST /websocket/{id}/cancel` endpoints. -- `GET /websocket/docs/openapi.json` for OpenAPI spec serving. -- Websocket ID tracking and session correlation via `agent_session_id` query parameter. -- Distributed tracing with GenAI semantic convention span attributes. -- W3C Baggage propagation for cross-service correlation. -- Streaming response support with span lifecycle management. diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/LICENSE b/sdk/agentserver/azure-ai-agentserver-websocket/LICENSE deleted file mode 100644 index 4c3581d3b052..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -Copyright (c) Microsoft Corporation. - -MIT License - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in b/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in deleted file mode 100644 index 15f469d979fd..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/MANIFEST.in +++ /dev/null @@ -1,8 +0,0 @@ -include *.md -include LICENSE -recursive-include tests *.py -recursive-include samples *.py *.md -include azure/__init__.py -include azure/ai/__init__.py -include azure/ai/agentserver/__init__.py -include azure/ai/agentserver/websocket/py.typed diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/README.md b/sdk/agentserver/azure-ai-agentserver-websocket/README.md deleted file mode 100644 index 1cb92c53d201..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/README.md +++ /dev/null @@ -1,292 +0,0 @@ -# Azure AI AgentServerHost Websocket for Python (WebSocket) - -The `azure-ai-agentserver-websocket` package provides the websocket protocol over **WebSocket long connections** for Azure AI Hosted Agent containers. It plugs into the [`azure-ai-agentserver-core`](https://pypi.org/project/azure-ai-agentserver-core/) host framework and exposes a single WebSocket endpoint at `/websocket/ws` that supports invoke, get, cancel, and streaming operations. - -## Getting started - -### Install the package - -```bash -pip install azure-ai-agentserver-websocket -``` - -This automatically installs `azure-ai-agentserver-core` as a dependency. - -### Prerequisites - -- Python 3.10 or later - -## Key concepts - -### WebsocketAgentServerHost - -`WebsocketAgentServerHost` is an `AgentServerHost` subclass that adds a WebSocket endpoint for the websocket protocol. 
It provides decorator methods for registering handler functions: - -- `@app.invoke_handler` — **Required.** Handles `invoke` actions. Supports both async functions (non-streaming) and async generators (streaming). -- `@app.get_websocket_handler` — Optional. Handles `get_websocket` actions. -- `@app.cancel_websocket_handler` — Optional. Handles `cancel_websocket` actions. - -### WebsocketContext - -Handler functions receive an `WebsocketContext` object containing: - -- `context.websocket_id` — The websocket ID (echoed from client or auto-generated UUID). -- `context.session_id` — The resolved session ID. - -### WebsocketError - -Handlers can raise `WebsocketError(code, message)` to return a domain-specific error to the client without exposing internal details. - -### WebSocket endpoint - -All websocket operations use a single persistent WebSocket connection: - -| Route | Description | -|---|---| -| `ws://host:port/websocket/ws` | WebSocket endpoint for all websocket operations | -| `GET /websocket/docs/openapi.json` | Serve the agent's OpenAPI 3.x spec (HTTP) | -| `GET /readiness` | Health check (HTTP) | - -### Client → Server messages - -All messages are JSON text frames with an `action` field: - -```json -{"action": "invoke", "payload": {...}, "websocket_id": "optional", "session_id": "optional"} -{"action": "get_websocket", "websocket_id": "required"} -{"action": "cancel_websocket", "websocket_id": "required"} -{"action": "ping"} -{"action": "pong"} -``` - -### Server → Client messages - -```json -{"type": "result", "websocket_id": "...", "session_id": "...", "payload": {...}} -{"type": "stream_chunk", "websocket_id": "...", "session_id": "...", "payload": {...}} -{"type": "stream_end", "websocket_id": "...", "session_id": "..."} -{"type": "error", "websocket_id": "...", "error": {"code": "...", "message": "..."}} -{"type": "ping"} -{"type": "pong"} -``` - -### WebSocket keep-alive (ping/pong) - -Azure APIM and Azure Load Balancer silently drop idle WebSocket 
connections after approximately 4 minutes, even though the backend supports 60-minute connections. To prevent this, the server sends periodic `{"type": "ping"}` messages to each connected client. - -- **Default interval**: 30 seconds (well within the ~4-minute idle timeout). -- **Disable**: Pass `ws_ping_interval=0` to `WebsocketAgentServerHost()`. -- **Custom interval**: Pass any positive integer, e.g. `ws_ping_interval=15`. - -Clients should respond with `{"action": "pong"}` when they receive a `{"type": "ping"}` message. Clients may also send `{"action": "ping"}` at any time; the server replies with `{"type": "pong"}`. - -```python -app = WebsocketAgentServerHost(ws_ping_interval=20) # ping every 20 seconds -``` - -### Session ID resolution - -Session IDs group related websocket sessions. The SDK resolves the session ID in order: - -1. `session_id` field in the WebSocket message -2. `FOUNDRY_AGENT_SESSION_ID` environment variable -3. Auto-generated UUID - -### Distributed tracing - -When tracing is enabled on the `AgentServerHost`, websocket spans are automatically created with GenAI semantic conventions: - -- **Span name**: `invoke_agent {FOUNDRY_AGENT_NAME}:{FOUNDRY_AGENT_VERSION}` -- **Span attributes**: `gen_ai.system`, `gen_ai.operation.name`, `gen_ai.response.id`, `gen_ai.conversation.id`, `gen_ai.agent.id`, `gen_ai.agent.name`, `gen_ai.agent.version` -- **Error tags**: `azure.ai.agentserver.websocket.error.code`, `.error.message` - -## Examples - -### Simple agent - -```python -from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext - -app = WebsocketAgentServerHost() - - -@app.invoke_handler -async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"greeting": f"Hello, {payload['name']}!"} - -app.run() -``` - -**Client** (using the `websockets` library): - -```python -import asyncio, json, websockets - -async def main(): - async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: - await 
ws.send(json.dumps({ - "action": "invoke", - "payload": {"name": "Alice"} - })) - while True: - msg = json.loads(await ws.recv()) - if msg["type"] == "ping": - await ws.send(json.dumps({"action": "pong"})) - elif msg["type"] == "result": - print(msg["payload"]["greeting"]) # Hello, Alice! - break - -asyncio.run(main()) -``` - -### Long-running operations with get/cancel - -```python -import asyncio - -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, - WebsocketError, -) - -_tasks: dict[str, asyncio.Task] = {} -_results: dict[str, dict] = {} - -app = WebsocketAgentServerHost() - - -@app.invoke_handler -async def handle(payload: dict, context: WebsocketContext) -> dict: - task = asyncio.create_task(do_work(context.websocket_id, payload)) - _tasks[context.websocket_id] = task - return {"websocket_id": context.websocket_id, "status": "running"} - - -@app.get_websocket_handler -async def get_websocket(context: WebsocketContext) -> dict: - if context.websocket_id in _results: - return _results[context.websocket_id] - if context.websocket_id in _tasks: - return {"websocket_id": context.websocket_id, "status": "running"} - raise WebsocketError("not_found", "Websocket not found") - - -@app.cancel_websocket_handler -async def cancel_websocket(context: WebsocketContext) -> dict: - if context.websocket_id in _tasks: - _tasks[context.websocket_id].cancel() - del _tasks[context.websocket_id] - return {"websocket_id": context.websocket_id, "status": "cancelled"} - raise WebsocketError("not_found", "Websocket not found") -``` - -### Streaming - -Use an async generator to stream chunks back to the client. Each yielded dict is sent as a `stream_chunk` message, followed by a `stream_end` when the generator completes. 
- -```python -from azure.ai.agentserver.websocket import WebsocketAgentServerHost, WebsocketContext - -app = WebsocketAgentServerHost() - - -@app.invoke_handler -async def handle(payload: dict, context: WebsocketContext): - for word in ["Hello", " ", "world", "!"]: - yield {"delta": word} -``` - -**Client**: - -```python -import asyncio, json, websockets - -async def main(): - async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: - await ws.send(json.dumps({"action": "invoke", "payload": {}})) - while True: - msg = json.loads(await ws.recv()) - if msg["type"] == "stream_chunk": - print(msg["payload"]["delta"], end="", flush=True) - elif msg["type"] == "stream_end": - print("\nDone!") - break - elif msg["type"] == "ping": - await ws.send(json.dumps({"action": "pong"})) - -asyncio.run(main()) -``` - -### Multi-turn conversation - -Use the `session_id` field to group websocket sessions over the same WebSocket connection: - -```python -import asyncio, json, websockets - -async def main(): - async with websockets.connect("ws://localhost:8088/websocket/ws") as ws: - # First turn - await ws.send(json.dumps({ - "action": "invoke", - "session_id": "session-abc", - "payload": {"message": "My name is Alice"}, - })) - print(json.loads(await ws.recv())) - - # Second turn (same session, same connection) - await ws.send(json.dumps({ - "action": "invoke", - "session_id": "session-abc", - "payload": {"message": "What is my name?"}, - })) - print(json.loads(await ws.recv())) - -asyncio.run(main()) -``` - -### Serving an OpenAPI spec - -Pass an OpenAPI spec dict to enable the discovery endpoint at `GET /websocket/docs/openapi.json`: - -```python -app = WebsocketAgentServerHost(openapi_spec={ - "openapi": "3.0.3", - "info": {"title": "My Agent", "version": "1.0.0"}, - "paths": { ... 
}, -}) -``` - -## Troubleshooting - -### Reporting issues - -To report an issue with the client library, or request additional features, please open a GitHub issue [here](https://github.com/Azure/azure-sdk-for-python/issues). - -## Next steps - -Visit the [Samples](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-websocket/samples) folder for complete working examples: - -| Sample | Description | -|---|---| -| [streaming_invoke_agent](https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_invoke_agent/) | Streaming code-generation tokens via WebSocket | - -## Contributing - -This project welcomes contributions and suggestions. Most contributions require -you to agree to a Contributor License Agreement (CLA) declaring that you have -the right to, and actually do, grant us the rights to use your contribution. -For details, visit https://cla.microsoft.com. - -When you submit a pull request, a CLA-bot will automatically determine whether -you need to provide a CLA and decorate the PR appropriately (e.g., label, -comment). Simply follow the instructions provided by the bot. You will only -need to do this once across all repos using our CLA. - -This project has adopted the -[Microsoft Open Source Code of Conduct][code_of_conduct]. For more information, -see the Code of Conduct FAQ or contact opencode@microsoft.com with any -additional questions or comments. 
- -[code_of_conduct]: https://opensource.microsoft.com/codeofconduct/ diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py deleted file mode 100644 index 8db66d3d0f0f..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py deleted file mode 100644 index 5712cd812f93..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -"""Websocket protocol for Azure AI Hosted Agents. - -This package provides an websocket protocol host as a subclass of -:class:`~azure.ai.agentserver.core.AgentServerHost`. 
- -Quick start:: - - from azure.ai.agentserver.websocket import WebsocketAgentServerHost - from starlette.responses import JSONResponse - - app = WebsocketAgentServerHost() - - @app.invoke_handler - async def handle(request): - return JSONResponse({"ok": True}) - - app.run() -""" -__path__ = __import__("pkgutil").extend_path(__path__, __name__) - -from ._websocket import WebsocketAgentServerHost, WebsocketContext, WebsocketError -from ._version import VERSION - -__all__ = ["WebsocketAgentServerHost", "WebsocketContext", "WebsocketError"] -__version__ = VERSION diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_version.py b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_version.py deleted file mode 100644 index 67d209a8cafd..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/_version.py +++ /dev/null @@ -1,5 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- - -VERSION = "1.0.0b1" diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/py.typed b/sdk/agentserver/azure-ai-agentserver-websocket/azure/ai/agentserver/websocket/py.typed deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json b/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json deleted file mode 100644 index 9e5aa4ee4889..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/cspell.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "ignoreWords": [ - "agentserver", - "appinsights", - "ASGI", - "autouse", - "caplog", - "genai", - "hypercorn", - "websocket", - "openapi", - "paramtype", - "pytestmark", - "rtype", - "starlette", - "traceparent", - "tracestate", - "tracecontext" - ], - "ignorePaths": [ - "*.csv", - "*.json", - "*.rst", - "samples/**" - ] -} diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt deleted file mode 100644 index e7af80133df7..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/dev_requirements.txt +++ /dev/null @@ -1,7 +0,0 @@ --e ../../../eng/tools/azure-sdk-tools -../azure-ai-agentserver-core -pytest -httpx -pytest-asyncio -opentelemetry-api>=1.20.0 -opentelemetry-sdk>=1.20.0 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml b/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml deleted file mode 100644 index d5dc0a51522b..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/pyproject.toml +++ /dev/null @@ -1,68 +0,0 @@ -[project] -name = "azure-ai-agentserver-websocket" -dynamic = ["version", "readme"] -description = "Websocket protocol for Azure AI Hosted Agents" -requires-python = ">=3.10" -authors = [ - { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, -] -license = "MIT" 
-classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", - "Programming Language :: Python :: 3.13", - "Programming Language :: Python :: 3.14", -] -keywords = ["azure", "azure sdk", "agent", "agentserver", "websocket"] - -dependencies = [ - "azure-ai-agentserver-core>=2.0.0b1", -] - -[build-system] -requires = ["setuptools>=69", "wheel"] -build-backend = "setuptools.build_meta" - -[project.urls] -repository = "https://github.com/Azure/azure-sdk-for-python" - -[tool.setuptools.packages.find] -exclude = [ - "tests*", - "samples*", - "doc*", - "azure", - "azure.ai", - "azure.ai.agentserver", -] - -[tool.setuptools.dynamic] -version = { attr = "azure.ai.agentserver.websocket._version.VERSION" } -readme = { file = ["README.md"], content-type = "text/markdown" } - -[tool.setuptools.package-data] -"azure.ai.agentserver.websocket" = ["py.typed"] - -[tool.ruff] -line-length = 120 -target-version = "py310" -lint.select = ["E", "F", "B", "I"] -lint.ignore = [] -fix = false - -[tool.ruff.lint.isort] -known-first-party = ["azure.ai.agentserver.websocket"] -combine-as-imports = true - -[tool.azure-sdk-build] -breaking = false -mypy = true -pyright = true -verifytypes = true -pylint = true -type_check_samples = false diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json b/sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json deleted file mode 100644 index f36c5a7fe0d3..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/pyrightconfig.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "reportOptionalMemberAccess": "warning", - "reportArgumentType": "warning", - "reportAttributeAccessIssue": "warning", - "reportMissingImports": "warning", - "reportGeneralTypeIssues": "warning", - 
"reportReturnType": "warning", - "exclude": [ - "**/samples/**" - ] -} diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore deleted file mode 100644 index 0f0d55d2aeca..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/.dockerignore +++ /dev/null @@ -1,6 +0,0 @@ -.venv -__pycache__ -*.pyc -*.pyo -*.pyd -.Python diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile deleted file mode 100644 index 0893a6584a1d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM python:3.12-slim - -WORKDIR /app - -COPY . user_agent/ -WORKDIR /app/user_agent - -RUN if [ -f requirements.txt ]; then \ - pip install --no-input -r requirements.txt; \ - else \ - echo "No requirements.txt found"; \ - fi - -EXPOSE 8088 - -CMD bash -c '\ - if [ -f /etc/ssl/certs/adc-egress-proxy-ca.crt ]; then \ - cat /etc/ssl/certs/adc-egress-proxy-ca.crt >> /etc/ssl/certs/ca-certificates.crt && \ - cat /etc/ssl/certs/adc-egress-proxy-ca.crt >> $(python -c "import certifi; print(certifi.where())"); \ - fi && \ - python main.py' diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml deleted file mode 100644 index ebb342e56334..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/agent.manifest.yaml +++ /dev/null @@ -1,16 +0,0 @@ -name: echo-agent-websocket-websocket-streaming -description: > - A simple echo agent that streams responses word-by-word using the - azure-ai-agentserver-websocket SDK with WebSocket 
streaming. -metadata: - tags: - - AI Agent Hosting - - Azure AI AgentServer - - Websocket Protocol - - Streaming -template: - name: echo-agent-websocket-streaming - kind: hosted - protocols: - - protocol: websocket - version: v0.0.1 diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt b/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt deleted file mode 100644 index 0aa70e3fbe53..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/samples/streaming_echo_agent/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -../../ -../../../azure-ai-agentserver-invocations -websockets \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py deleted file mode 100644 index edfcbd1fffc4..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/conftest.py +++ /dev/null @@ -1,192 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -"""Shared fixtures and factory functions for websocket WebSocket tests.""" -from typing import Any - -import pytest -from starlette.testclient import TestClient - -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, - WebsocketError, -) - - -# --------------------------------------------------------------------------- -# Sample OpenAPI spec used by several tests -# --------------------------------------------------------------------------- - -SAMPLE_OPENAPI_SPEC: dict[str, Any] = { - "openapi": "3.0.0", - "info": {"title": "Echo Agent", "version": "1.0.0"}, - "paths": { - "/websocket": { - "post": { - "requestBody": { - "required": True, - "content": { - "application/json": { - "schema": { - "type": "object", - "required": ["message"], - "properties": { - "message": {"type": "string"}, - }, - } - } - }, - }, - "responses": { - "200": { - "description": "OK", - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "reply": {"type": "string"}, - }, - } - } - }, - } - }, - } - } - }, -} - - -# --------------------------------------------------------------------------- -# Factory functions -# --------------------------------------------------------------------------- - - -def _make_echo_agent(**kwargs: Any) -> WebsocketAgentServerHost: - """Create an WebsocketAgentServerHost whose invoke handler echoes the payload.""" - app = WebsocketAgentServerHost(**kwargs) - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"echo": payload, "websocket_id": context.websocket_id} - - return app - - -def _make_streaming_agent(**kwargs: Any) -> WebsocketAgentServerHost: - """Create an WebsocketAgentServerHost whose invoke handler yields 3 JSON chunks.""" - app = WebsocketAgentServerHost(**kwargs) - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext): - for i in 
range(3): - yield {"chunk": i} - - return app - - -def _make_async_storage_agent(**kwargs: Any) -> WebsocketAgentServerHost: - """Create an WebsocketAgentServerHost with get/cancel handlers and in-memory store.""" - app = WebsocketAgentServerHost(**kwargs) - store: dict[str, dict] = {} - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - store[context.websocket_id] = payload - return {"stored": True, "websocket_id": context.websocket_id} - - @app.get_websocket_handler - async def get_handler(context: WebsocketContext) -> dict: - if context.websocket_id not in store: - raise WebsocketError("not_found", "Not found") - return {"data": store[context.websocket_id]} - - @app.cancel_websocket_handler - async def cancel_handler(context: WebsocketContext) -> dict: - if context.websocket_id not in store: - raise WebsocketError("not_found", "Not found") - del store[context.websocket_id] - return {"status": "cancelled"} - - return app - - -def _make_validated_agent() -> WebsocketAgentServerHost: - """Create an WebsocketAgentServerHost with OpenAPI spec.""" - app = WebsocketAgentServerHost(openapi_spec=SAMPLE_OPENAPI_SPEC) - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"reply": f"echo: {payload['message']}"} - - return app - - -def _make_failing_agent(**kwargs: Any) -> WebsocketAgentServerHost: - """Create an WebsocketAgentServerHost whose handler raises ValueError.""" - app = WebsocketAgentServerHost(**kwargs) - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - raise ValueError("something went wrong") - - return app - - -# --------------------------------------------------------------------------- -# Fixtures -# --------------------------------------------------------------------------- - - -@pytest.fixture() -def echo_app(): - return _make_echo_agent() - - -@pytest.fixture() -def echo_client(echo_app): - return TestClient(echo_app) - 
- -@pytest.fixture() -def streaming_app(): - return _make_streaming_agent() - - -@pytest.fixture() -def streaming_client(streaming_app): - return TestClient(streaming_app) - - -@pytest.fixture() -def async_storage_app(): - return _make_async_storage_agent() - - -@pytest.fixture() -def async_storage_client(async_storage_app): - return TestClient(async_storage_app) - - -@pytest.fixture() -def validated_client(): - app = _make_validated_agent() - return TestClient(app) - - -@pytest.fixture() -def no_spec_client(): - app = _make_echo_agent() - return TestClient(app) - - -@pytest.fixture() -def failing_app(): - return _make_failing_agent() - - -@pytest.fixture() -def failing_client(failing_app): - return TestClient(failing_app) diff --git a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py b/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py deleted file mode 100644 index 589599135a9b..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-websocket/tests/test_decorator_pattern.py +++ /dev/null @@ -1,209 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -"""Tests for decorator-based handler registration on WebsocketAgentServerHost.""" -from starlette.testclient import TestClient - -from azure.ai.agentserver.websocket import ( - WebsocketAgentServerHost, - WebsocketContext, - WebsocketError, -) - - -# --------------------------------------------------------------------------- -# invoke_handler stores function -# --------------------------------------------------------------------------- - -def test_invoke_handler_stores_function(): - """@app.invoke_handler stores the function on the protocol object.""" - app = WebsocketAgentServerHost() - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"ok": True} - - assert app._invoke_fn is handle - - -# --------------------------------------------------------------------------- -# invoke_handler returns original function -# --------------------------------------------------------------------------- - -def test_invoke_handler_returns_original_function(): - """@app.invoke_handler returns the original function.""" - app = WebsocketAgentServerHost() - - async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"ok": True} - - result = app.invoke_handler(handle) - assert result is handle - - -# --------------------------------------------------------------------------- -# get_websocket_handler stores function -# --------------------------------------------------------------------------- - -def test_get_websocket_handler_stores_function(): - """@app.get_websocket_handler stores the function.""" - app = WebsocketAgentServerHost() - - @app.get_websocket_handler - async def get_handler(context: WebsocketContext) -> dict: - return {"ok": True} - - assert app._get_websocket_fn is get_handler - - -# --------------------------------------------------------------------------- -# cancel_websocket_handler stores function -# 
--------------------------------------------------------------------------- - -def test_cancel_websocket_handler_stores_function(): - """@app.cancel_websocket_handler stores the function.""" - app = WebsocketAgentServerHost() - - @app.cancel_websocket_handler - async def cancel_handler(context: WebsocketContext) -> dict: - return {"ok": True} - - assert app._cancel_websocket_fn is cancel_handler - - -# --------------------------------------------------------------------------- -# shutdown_handler stores function -# --------------------------------------------------------------------------- - -def test_shutdown_handler_stores_function(): - """@server.shutdown_handler stores the function on the server.""" - app = WebsocketAgentServerHost() - - @app.shutdown_handler - async def on_shutdown(): - pass - - assert app._shutdown_fn is on_shutdown - - -# --------------------------------------------------------------------------- -# Full request flow -# --------------------------------------------------------------------------- - -def test_full_request_flow(): - """Full lifecycle: invoke → get → cancel → get (not_found).""" - app = WebsocketAgentServerHost() - store: dict[str, dict] = {} - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - store[context.websocket_id] = payload - return {"stored": True} - - @app.get_websocket_handler - async def get_handler(context: WebsocketContext) -> dict: - if context.websocket_id not in store: - raise WebsocketError("not_found", "Not found") - return {"data": store[context.websocket_id]} - - @app.cancel_websocket_handler - async def cancel_handler(context: WebsocketContext) -> dict: - if context.websocket_id not in store: - raise WebsocketError("not_found", "Not found") - del store[context.websocket_id] - return {"status": "cancelled"} - - client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: - # Invoke - ws.send_json({"action": "invoke", "payload": {"key": 
"lifecycle-test"}}) - invoke_resp = ws.receive_json() - assert invoke_resp["type"] == "result" - inv_id = invoke_resp["websocket_id"] - - # Get - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) - get_resp = ws.receive_json() - assert get_resp["type"] == "result" - assert get_resp["payload"]["data"]["key"] == "lifecycle-test" - - # Cancel - ws.send_json({"action": "cancel_websocket", "websocket_id": inv_id}) - cancel_resp = ws.receive_json() - assert cancel_resp["type"] == "result" - assert cancel_resp["payload"]["status"] == "cancelled" - - # Get after cancel - ws.send_json({"action": "get_websocket", "websocket_id": inv_id}) - get_resp2 = ws.receive_json() - assert get_resp2["type"] == "error" - assert get_resp2["error"]["code"] == "not_found" - - -# --------------------------------------------------------------------------- -# Missing optional handlers -# --------------------------------------------------------------------------- - -def test_missing_invoke_handler_returns_error(): - """Invoke without registered handler returns not_implemented error.""" - app = WebsocketAgentServerHost() - client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "invoke", "payload": {}}) - resp = ws.receive_json() - assert resp["type"] == "error" - assert resp["error"]["code"] == "not_implemented" - - -def test_missing_get_handler_returns_error(): - """get_websocket without registered handler returns not_found error.""" - app = WebsocketAgentServerHost() - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"ok": True} - - client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "get_websocket", "websocket_id": "some-id"}) - resp = ws.receive_json() - assert resp["type"] == "error" - assert resp["error"]["code"] == "not_found" - - -def test_missing_cancel_handler_returns_error(): - """cancel_websocket without 
registered handler returns not_found error.""" - app = WebsocketAgentServerHost() - - @app.invoke_handler - async def handle(payload: dict, context: WebsocketContext) -> dict: - return {"ok": True} - - client = TestClient(app) - with client.websocket_connect("/websocket/ws") as ws: - ws.send_json({"action": "cancel_websocket", "websocket_id": "some-id"}) - resp = ws.receive_json() - assert resp["type"] == "error" - assert resp["error"]["code"] == "not_found" - - -# --------------------------------------------------------------------------- -# Optional handler defaults and overrides -# --------------------------------------------------------------------------- - -def test_optional_handlers_default_none(): - """Get and cancel handlers default to None.""" - app = WebsocketAgentServerHost() - assert app._get_websocket_fn is None - assert app._cancel_websocket_fn is None - - -def test_optional_handler_override(): - """Setting an optional handler replaces None.""" - app = WebsocketAgentServerHost() - - @app.get_websocket_handler - async def get_handler(context: WebsocketContext) -> dict: - return {"ok": True} - - assert app._get_websocket_fn is not None From 2537ddc67b0c3a1dc727a5263cc95b30dcbd4197 Mon Sep 17 00:00:00 2001 From: Azure SDK Bot <53356347+azure-sdk@users.noreply.github.com> Date: Wed, 6 May 2026 23:25:26 -0400 Subject: [PATCH 06/10] [AutoPR azure-mgmt-fileshares]-generated-from-SDK Generation - Python-6256859 (#46749) * Configurations: 'specification/fileshares/resource-manager/Microsoft.FileShares/FileShares/tspconfig.yaml', and CommitSHA: '61f4dfae58f52c2c814ea7c6653ce77ac0cacd4a' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=6256859 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. 
* update test files * Update pyproject.toml * Configurations: 'specification/fileshares/resource-manager/Microsoft.FileShares/FileShares/tspconfig.yaml', and CommitSHA: '75ca22c5387184fb1f0c3b115ed22a006813800d' in SpecRepo: 'https://github.com/Azure/azure-rest-api-specs' Pipeline run: https://dev.azure.com/azure-sdk/internal/_build/results?buildId=6257089 Refer to https://eng.ms/docs/products/azure-developer-experience/develop/sdk-release/sdk-release-prerequisites to prepare for SDK release. * update Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> * update --------- Co-authored-by: Yuchao Yan Co-authored-by: Chenxi Jiang (WICRESOFT NORTH AMERICA LTD) Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com> --- .../azure-mgmt-fileshares/CHANGELOG.md | 7 + sdk/fileshares/azure-mgmt-fileshares/LICENSE | 21 + .../azure-mgmt-fileshares/MANIFEST.in | 7 + .../azure-mgmt-fileshares/README.md | 61 + .../azure-mgmt-fileshares/_metadata.json | 10 + .../apiview-properties.json | 100 + .../azure-mgmt-fileshares/assets.json | 6 + .../azure-mgmt-fileshares/azure/__init__.py | 1 + .../azure/mgmt/__init__.py | 1 + .../azure/mgmt/fileshares/__init__.py | 32 + .../azure/mgmt/fileshares/_client.py | 167 + .../azure/mgmt/fileshares/_configuration.py | 80 + .../azure/mgmt/fileshares/_patch.py | 20 + .../azure/mgmt/fileshares/_utils/__init__.py | 6 + .../mgmt/fileshares/_utils/model_base.py | 1441 +++++++ .../mgmt/fileshares/_utils/serialization.py | 2041 +++++++++ .../azure/mgmt/fileshares/_validation.py | 66 + .../azure/mgmt/fileshares/_version.py | 9 + .../azure/mgmt/fileshares/aio/__init__.py | 29 + .../azure/mgmt/fileshares/aio/_client.py | 172 + .../mgmt/fileshares/aio/_configuration.py | 80 + .../azure/mgmt/fileshares/aio/_patch.py | 20 + .../fileshares/aio/operations/__init__.py | 35 + .../fileshares/aio/operations/_operations.py | 3210 ++++++++++++++ .../mgmt/fileshares/aio/operations/_patch.py | 20 + .../azure/mgmt/fileshares/models/__init__.py 
| 130 + .../azure/mgmt/fileshares/models/_enums.py | 161 + .../azure/mgmt/fileshares/models/_models.py | 1585 +++++++ .../azure/mgmt/fileshares/models/_patch.py | 20 + .../mgmt/fileshares/operations/__init__.py | 35 + .../mgmt/fileshares/operations/_operations.py | 3807 +++++++++++++++++ .../mgmt/fileshares/operations/_patch.py | 20 + .../azure/mgmt/fileshares/py.typed | 1 + .../dev_requirements.txt | 5 + .../file_share_get_limits_maximum_set_gen.py | 41 + .../file_share_get_limits_minimum_set_gen.py | 41 + ...isioning_recommendation_maximum_set_gen.py | 42 + ...isioning_recommendation_minimum_set_gen.py | 42 + ...le_share_get_usage_data_maximum_set_gen.py | 41 + ...le_share_get_usage_data_minimum_set_gen.py | 41 + ...apshot_create_or_update_maximum_set_gen.py | 44 + ...e_share_snapshot_delete_maximum_set_gen.py | 42 + ...file_share_snapshot_get_maximum_set_gen.py | 43 + ...ile_share_snapshot_list_maximum_set_gen.py | 43 + ...ile_share_snapshot_list_minimum_set_gen.py | 43 + ...e_share_snapshot_update_maximum_set_gen.py | 44 + ...check_name_availability_maximum_set_gen.py | 42 + ...check_name_availability_minimum_set_gen.py | 42 + ...shares_create_or_update_maximum_set_gen.py | 63 + .../file_shares_delete_maximum_set_gen.py | 41 + .../file_shares_get_maximum_set_gen.py | 42 + ...e_shares_list_by_parent_maximum_set_gen.py | 42 + ...e_shares_list_by_parent_minimum_set_gen.py | 42 + ...es_list_by_subscription_maximum_set_gen.py | 40 + ...es_list_by_subscription_minimum_set_gen.py | 40 + .../file_shares_update_maximum_set_gen.py | 58 + .../operations_list_maximum_set_gen.py | 40 + .../operations_list_minimum_set_gen.py | 40 + .../private_endpoint_connections_create.py | 48 + .../private_endpoint_connections_delete.py | 42 + .../private_endpoint_connections_get.py | 43 + ...endpoint_connections_list_by_file_share.py | 43 + .../private_link_resources_get.py | 43 + ...ivate_link_resources_list_by_file_share.py | 43 + .../generated_tests/conftest.py | 35 + 
...es_mgmt_file_share_snapshots_operations.py | 93 + ...t_file_share_snapshots_operations_async.py | 100 + ...file_shares_mgmt_file_shares_operations.py | 162 + ...hares_mgmt_file_shares_operations_async.py | 169 + ...gmt_informational_operations_operations.py | 50 + ...formational_operations_operations_async.py | 51 + .../test_file_shares_mgmt_operations.py | 27 + .../test_file_shares_mgmt_operations_async.py | 28 + ...private_endpoint_connections_operations.py | 89 + ...e_endpoint_connections_operations_async.py | 94 + ..._mgmt_private_link_resources_operations.py | 42 + ...private_link_resources_operations_async.py | 43 + .../azure-mgmt-fileshares/pyproject.toml | 86 + .../tests/_fs_test_helpers.py | 163 + .../azure-mgmt-fileshares/tests/conftest.py | 40 + .../test_fileshares_complex_scenarios.py | 123 + .../tests/test_fileshares_crud.py | 122 + .../tests/test_fileshares_edge_cases.py | 137 + .../tests/test_fileshares_informational.py | 94 + .../tests/test_fileshares_listing.py | 54 + .../tests/test_fileshares_negative.py | 134 + .../tests/test_fileshares_operations.py | 40 + .../tests/test_fileshares_private_endpoint.py | 72 + .../tests/test_fileshares_snapshots.py | 115 + .../azure-mgmt-fileshares/tsp-location.yaml | 4 + sdk/fileshares/ci.yml | 34 + 91 files changed, 16908 insertions(+) create mode 100644 sdk/fileshares/azure-mgmt-fileshares/CHANGELOG.md create mode 100644 sdk/fileshares/azure-mgmt-fileshares/LICENSE create mode 100644 sdk/fileshares/azure-mgmt-fileshares/MANIFEST.in create mode 100644 sdk/fileshares/azure-mgmt-fileshares/README.md create mode 100644 sdk/fileshares/azure-mgmt-fileshares/_metadata.json create mode 100644 sdk/fileshares/azure-mgmt-fileshares/apiview-properties.json create mode 100644 sdk/fileshares/azure-mgmt-fileshares/assets.json create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/__init__.py create mode 100644 
sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_client.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_configuration.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_patch.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/model_base.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/serialization.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_validation.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_version.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_client.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_configuration.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_patch.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_patch.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_enums.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_models.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_patch.py create mode 100644 
sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/__init__.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_patch.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/py.typed create mode 100644 sdk/fileshares/azure-mgmt-fileshares/dev_requirements.txt create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_create_or_update_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_delete_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_get_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_update_maximum_set_gen.py create mode 100644 
sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_create_or_update_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_delete_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_get_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_update_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_maximum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_minimum_set_gen.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_create.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_delete.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_get.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_list_by_file_share.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_get.py create mode 100644 
sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_list_by_file_share.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/conftest.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations_async.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations_async.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations_async.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations_async.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations_async.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations_async.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/pyproject.toml create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/_fs_test_helpers.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/conftest.py create mode 100644 
sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_complex_scenarios.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_crud.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_edge_cases.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_informational.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_listing.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_negative.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_operations.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_private_endpoint.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_snapshots.py create mode 100644 sdk/fileshares/azure-mgmt-fileshares/tsp-location.yaml create mode 100644 sdk/fileshares/ci.yml diff --git a/sdk/fileshares/azure-mgmt-fileshares/CHANGELOG.md b/sdk/fileshares/azure-mgmt-fileshares/CHANGELOG.md new file mode 100644 index 000000000000..e8e4a849c601 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/CHANGELOG.md @@ -0,0 +1,7 @@ +# Release History + +## 1.0.0b1 (2026-05-06) + +### Other Changes + + - Initial version \ No newline at end of file diff --git a/sdk/fileshares/azure-mgmt-fileshares/LICENSE b/sdk/fileshares/azure-mgmt-fileshares/LICENSE new file mode 100644 index 000000000000..63447fd8bbbf --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) Microsoft Corporation. 
+ +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/sdk/fileshares/azure-mgmt-fileshares/MANIFEST.in b/sdk/fileshares/azure-mgmt-fileshares/MANIFEST.in new file mode 100644 index 000000000000..b1c7dc3a10f4 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/MANIFEST.in @@ -0,0 +1,7 @@ +include *.md +include LICENSE +include azure/mgmt/fileshares/py.typed +recursive-include tests *.py +recursive-include samples *.py *.md +include azure/__init__.py +include azure/mgmt/__init__.py diff --git a/sdk/fileshares/azure-mgmt-fileshares/README.md b/sdk/fileshares/azure-mgmt-fileshares/README.md new file mode 100644 index 000000000000..c9f7e970b354 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/README.md @@ -0,0 +1,61 @@ +# Microsoft Azure SDK for Python + +This is the Microsoft Azure FileShares Management Client Library. +This package has been tested with Python 3.10+. 
+For a more complete view of Azure libraries, see the [azure sdk python release](https://aka.ms/azsdk/python/all). + +## _Disclaimer_ + +_Azure SDK Python packages support for Python 2.7 has ended 01 January 2022. For more information and questions, please refer to https://github.com/Azure/azure-sdk-for-python/issues/20691_ + +## Getting started + +### Prerequisites + +- Python 3.10+ is required to use this package. +- [Azure subscription](https://azure.microsoft.com/free/) + +### Install the package + +```bash +pip install azure-mgmt-fileshares +pip install azure-identity +``` + +### Authentication + +By default, [Azure Active Directory](https://aka.ms/awps/aad) token authentication depends on correct configuration of the following environment variables. + +- `AZURE_CLIENT_ID` for Azure client ID. +- `AZURE_TENANT_ID` for Azure tenant ID. +- `AZURE_CLIENT_SECRET` for Azure client secret. + +In addition, Azure subscription ID can be configured via environment variable `AZURE_SUBSCRIPTION_ID`. + +With above configuration, client can be authenticated by following code: + +```python +from azure.identity import DefaultAzureCredential +from azure.mgmt.fileshares import FileSharesMgmtClient +import os + +sub_id = os.getenv("AZURE_SUBSCRIPTION_ID") +client = FileSharesMgmtClient(credential=DefaultAzureCredential(), subscription_id=sub_id) +``` + +## Examples + +Code samples for this package can be found at: +- [Search FileShares Management](https://docs.microsoft.com/samples/browse/?languages=python&term=Getting%20started%20-%20Managing&terms=Getting%20started%20-%20Managing) on docs.microsoft.com +- [Azure Python Mgmt SDK Samples Repo](https://aka.ms/azsdk/python/mgmt/samples) + + +## Troubleshooting + +## Next steps + +## Provide Feedback + +If you encounter any bugs or have suggestions, please file an issue in the +[Issues](https://github.com/Azure/azure-sdk-for-python/issues) +section of the project. 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/_metadata.json b/sdk/fileshares/azure-mgmt-fileshares/_metadata.json new file mode 100644 index 000000000000..be8bb204be34 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/_metadata.json @@ -0,0 +1,10 @@ +{ + "apiVersion": "2026-06-01", + "apiVersions": { + "Microsoft.FileShares": "2026-06-01" + }, + "commit": "75ca22c5387184fb1f0c3b115ed22a006813800d", + "repository_url": "https://github.com/Azure/azure-rest-api-specs", + "typespec_src": "specification/fileshares/resource-manager/Microsoft.FileShares/FileShares", + "emitterVersion": "0.61.3" +} \ No newline at end of file diff --git a/sdk/fileshares/azure-mgmt-fileshares/apiview-properties.json b/sdk/fileshares/azure-mgmt-fileshares/apiview-properties.json new file mode 100644 index 000000000000..646dbfda30db --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/apiview-properties.json @@ -0,0 +1,100 @@ +{ + "CrossLanguagePackageId": "Microsoft.FileShares", + "CrossLanguageDefinitionId": { + "azure.mgmt.fileshares.models.CheckNameAvailabilityRequest": "Azure.ResourceManager.CommonTypes.CheckNameAvailabilityRequest", + "azure.mgmt.fileshares.models.CheckNameAvailabilityResponse": "Azure.ResourceManager.CommonTypes.CheckNameAvailabilityResponse", + "azure.mgmt.fileshares.models.ErrorAdditionalInfo": "Azure.ResourceManager.CommonTypes.ErrorAdditionalInfo", + "azure.mgmt.fileshares.models.ErrorDetail": "Azure.ResourceManager.CommonTypes.ErrorDetail", + "azure.mgmt.fileshares.models.ErrorResponse": "Azure.ResourceManager.CommonTypes.ErrorResponse", + "azure.mgmt.fileshares.models.Resource": "Azure.ResourceManager.CommonTypes.Resource", + "azure.mgmt.fileshares.models.TrackedResource": "Azure.ResourceManager.CommonTypes.TrackedResource", + "azure.mgmt.fileshares.models.FileShare": "Microsoft.FileShares.FileShare", + "azure.mgmt.fileshares.models.FileShareLimits": "Microsoft.FileShares.FileShareLimits", + "azure.mgmt.fileshares.models.FileShareLimitsOutput": 
"Microsoft.FileShares.FileShareLimitsOutput", + "azure.mgmt.fileshares.models.FileShareLimitsResponse": "Microsoft.FileShares.FileShareLimitsResponse", + "azure.mgmt.fileshares.models.FileShareProperties": "Microsoft.FileShares.FileShareProperties", + "azure.mgmt.fileshares.models.FileShareProvisioningConstants": "Microsoft.FileShares.FileShareProvisioningConstants", + "azure.mgmt.fileshares.models.FileShareProvisioningRecommendationInput": "Microsoft.FileShares.FileShareProvisioningRecommendationInput", + "azure.mgmt.fileshares.models.FileShareProvisioningRecommendationOutput": "Microsoft.FileShares.FileShareProvisioningRecommendationOutput", + "azure.mgmt.fileshares.models.FileShareProvisioningRecommendationRequest": "Microsoft.FileShares.FileShareProvisioningRecommendationRequest", + "azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse": "Microsoft.FileShares.FileShareProvisioningRecommendationResponse", + "azure.mgmt.fileshares.models.ProxyResource": "Azure.ResourceManager.CommonTypes.ProxyResource", + "azure.mgmt.fileshares.models.FileShareSnapshot": "Microsoft.FileShares.FileShareSnapshot", + "azure.mgmt.fileshares.models.FileShareSnapshotProperties": "Microsoft.FileShares.FileShareSnapshotProperties", + "azure.mgmt.fileshares.models.FileShareSnapshotUpdate": "Azure.ResourceManager.Foundations.ResourceUpdateModel", + "azure.mgmt.fileshares.models.FileShareSnapshotUpdateProperties": "Azure.ResourceManager.Foundations.ResourceUpdateModelProperties", + "azure.mgmt.fileshares.models.FileShareUpdate": "Azure.ResourceManager.Foundations.ResourceUpdateModel", + "azure.mgmt.fileshares.models.FileShareUpdateProperties": "Azure.ResourceManager.Foundations.ResourceUpdateModelProperties", + "azure.mgmt.fileshares.models.FileShareUsageDataOutput": "Microsoft.FileShares.FileShareUsageDataOutput", + "azure.mgmt.fileshares.models.FileShareUsageDataResponse": "Microsoft.FileShares.FileShareUsageDataResponse", + 
"azure.mgmt.fileshares.models.LiveSharesUsageData": "Microsoft.FileShares.LiveSharesUsageData", + "azure.mgmt.fileshares.models.NfsProtocolProperties": "Microsoft.FileShares.NfsProtocolProperties", + "azure.mgmt.fileshares.models.Operation": "Azure.ResourceManager.CommonTypes.Operation", + "azure.mgmt.fileshares.models.OperationDisplay": "Azure.ResourceManager.CommonTypes.OperationDisplay", + "azure.mgmt.fileshares.models.PrivateEndpoint": "Azure.ResourceManager.CommonTypes.PrivateEndpoint", + "azure.mgmt.fileshares.models.PrivateEndpointConnection": "Microsoft.FileShares.PrivateEndpointConnection", + "azure.mgmt.fileshares.models.PrivateEndpointConnectionProperties": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProperties", + "azure.mgmt.fileshares.models.PrivateLinkResource": "Microsoft.FileShares.PrivateLinkResource", + "azure.mgmt.fileshares.models.PrivateLinkResourceProperties": "Microsoft.FileShares.PrivateLinkResourceProperties", + "azure.mgmt.fileshares.models.PrivateLinkServiceConnectionState": "Azure.ResourceManager.CommonTypes.PrivateLinkServiceConnectionState", + "azure.mgmt.fileshares.models.PublicAccessProperties": "Microsoft.FileShares.PublicAccessProperties", + "azure.mgmt.fileshares.models.SystemData": "Azure.ResourceManager.CommonTypes.SystemData", + "azure.mgmt.fileshares.models.CreatedByType": "Azure.ResourceManager.CommonTypes.createdByType", + "azure.mgmt.fileshares.models.MediaTier": "Microsoft.FileShares.MediaTier", + "azure.mgmt.fileshares.models.Redundancy": "Microsoft.FileShares.Redundancy", + "azure.mgmt.fileshares.models.Protocol": "Microsoft.FileShares.Protocol", + "azure.mgmt.fileshares.models.ShareRootSquash": "Microsoft.FileShares.ShareRootSquash", + "azure.mgmt.fileshares.models.EncryptionInTransitRequired": "Microsoft.FileShares.EncryptionInTransitRequired", + "azure.mgmt.fileshares.models.FileShareProvisioningState": "Microsoft.FileShares.FileShareProvisioningState", + 
"azure.mgmt.fileshares.models.PublicNetworkAccess": "Microsoft.FileShares.PublicNetworkAccess", + "azure.mgmt.fileshares.models.PrivateEndpointServiceConnectionStatus": "Azure.ResourceManager.CommonTypes.PrivateEndpointServiceConnectionStatus", + "azure.mgmt.fileshares.models.PrivateEndpointConnectionProvisioningState": "Azure.ResourceManager.CommonTypes.PrivateEndpointConnectionProvisioningState", + "azure.mgmt.fileshares.models.CheckNameAvailabilityReason": "Azure.ResourceManager.CommonTypes.CheckNameAvailabilityReason", + "azure.mgmt.fileshares.models.Origin": "Azure.ResourceManager.CommonTypes.Origin", + "azure.mgmt.fileshares.models.ActionType": "Azure.ResourceManager.CommonTypes.ActionType", + "azure.mgmt.fileshares.operations.FileSharesOperations.get": "Microsoft.FileShares.FileShares.get", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.get": "Microsoft.FileShares.FileShares.get", + "azure.mgmt.fileshares.operations.FileSharesOperations.begin_create_or_update": "Microsoft.FileShares.FileShares.createOrUpdate", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.begin_create_or_update": "Microsoft.FileShares.FileShares.createOrUpdate", + "azure.mgmt.fileshares.operations.FileSharesOperations.begin_update": "Microsoft.FileShares.FileShares.update", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.begin_update": "Microsoft.FileShares.FileShares.update", + "azure.mgmt.fileshares.operations.FileSharesOperations.begin_delete": "Microsoft.FileShares.FileShares.delete", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.begin_delete": "Microsoft.FileShares.FileShares.delete", + "azure.mgmt.fileshares.operations.FileSharesOperations.list_by_subscription": "Microsoft.FileShares.FileShares.listBySubscription", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.list_by_subscription": "Microsoft.FileShares.FileShares.listBySubscription", + "azure.mgmt.fileshares.operations.FileSharesOperations.list_by_parent": 
"Microsoft.FileShares.FileShares.listByParent", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.list_by_parent": "Microsoft.FileShares.FileShares.listByParent", + "azure.mgmt.fileshares.operations.FileSharesOperations.check_name_availability": "Microsoft.FileShares.FileShares.checkNameAvailability", + "azure.mgmt.fileshares.aio.operations.FileSharesOperations.check_name_availability": "Microsoft.FileShares.FileShares.checkNameAvailability", + "azure.mgmt.fileshares.operations.FileShareSnapshotsOperations.get_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.getFileShareSnapshot", + "azure.mgmt.fileshares.aio.operations.FileShareSnapshotsOperations.get_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.getFileShareSnapshot", + "azure.mgmt.fileshares.operations.FileShareSnapshotsOperations.begin_create_or_update_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.createOrUpdateFileShareSnapshot", + "azure.mgmt.fileshares.aio.operations.FileShareSnapshotsOperations.begin_create_or_update_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.createOrUpdateFileShareSnapshot", + "azure.mgmt.fileshares.operations.FileShareSnapshotsOperations.begin_update_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.updateFileShareSnapshot", + "azure.mgmt.fileshares.aio.operations.FileShareSnapshotsOperations.begin_update_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.updateFileShareSnapshot", + "azure.mgmt.fileshares.operations.FileShareSnapshotsOperations.begin_delete_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.deleteFileShareSnapshot", + "azure.mgmt.fileshares.aio.operations.FileShareSnapshotsOperations.begin_delete_file_share_snapshot": "Microsoft.FileShares.FileShareSnapshots.deleteFileShareSnapshot", + "azure.mgmt.fileshares.operations.FileShareSnapshotsOperations.list_by_file_share": "Microsoft.FileShares.FileShareSnapshots.listByFileShare", + 
"azure.mgmt.fileshares.aio.operations.FileShareSnapshotsOperations.list_by_file_share": "Microsoft.FileShares.FileShareSnapshots.listByFileShare", + "azure.mgmt.fileshares.operations.Operations.list": "Microsoft.FileShares.Operations.list", + "azure.mgmt.fileshares.aio.operations.Operations.list": "Microsoft.FileShares.Operations.list", + "azure.mgmt.fileshares.operations.InformationalOperationsOperations.get_usage_data": "Microsoft.FileShares.InformationalOperations.getUsageData", + "azure.mgmt.fileshares.aio.operations.InformationalOperationsOperations.get_usage_data": "Microsoft.FileShares.InformationalOperations.getUsageData", + "azure.mgmt.fileshares.operations.InformationalOperationsOperations.get_limits": "Microsoft.FileShares.InformationalOperations.getLimits", + "azure.mgmt.fileshares.aio.operations.InformationalOperationsOperations.get_limits": "Microsoft.FileShares.InformationalOperations.getLimits", + "azure.mgmt.fileshares.operations.InformationalOperationsOperations.get_provisioning_recommendation": "Microsoft.FileShares.InformationalOperations.getProvisioningRecommendation", + "azure.mgmt.fileshares.aio.operations.InformationalOperationsOperations.get_provisioning_recommendation": "Microsoft.FileShares.InformationalOperations.getProvisioningRecommendation", + "azure.mgmt.fileshares.operations.PrivateEndpointConnectionsOperations.get": "Microsoft.FileShares.PrivateEndpointConnections.get", + "azure.mgmt.fileshares.aio.operations.PrivateEndpointConnectionsOperations.get": "Microsoft.FileShares.PrivateEndpointConnections.get", + "azure.mgmt.fileshares.operations.PrivateEndpointConnectionsOperations.begin_create": "Microsoft.FileShares.PrivateEndpointConnections.create", + "azure.mgmt.fileshares.aio.operations.PrivateEndpointConnectionsOperations.begin_create": "Microsoft.FileShares.PrivateEndpointConnections.create", + "azure.mgmt.fileshares.operations.PrivateEndpointConnectionsOperations.begin_delete": 
"Microsoft.FileShares.PrivateEndpointConnections.delete", + "azure.mgmt.fileshares.aio.operations.PrivateEndpointConnectionsOperations.begin_delete": "Microsoft.FileShares.PrivateEndpointConnections.delete", + "azure.mgmt.fileshares.operations.PrivateEndpointConnectionsOperations.list_by_file_share": "Microsoft.FileShares.PrivateEndpointConnections.listByFileShare", + "azure.mgmt.fileshares.aio.operations.PrivateEndpointConnectionsOperations.list_by_file_share": "Microsoft.FileShares.PrivateEndpointConnections.listByFileShare", + "azure.mgmt.fileshares.operations.PrivateLinkResourcesOperations.get": "Microsoft.FileShares.PrivateLinkResources.get", + "azure.mgmt.fileshares.aio.operations.PrivateLinkResourcesOperations.get": "Microsoft.FileShares.PrivateLinkResources.get", + "azure.mgmt.fileshares.operations.PrivateLinkResourcesOperations.list": "Microsoft.FileShares.PrivateLinkResources.list", + "azure.mgmt.fileshares.aio.operations.PrivateLinkResourcesOperations.list": "Microsoft.FileShares.PrivateLinkResources.list" + } +} \ No newline at end of file diff --git a/sdk/fileshares/azure-mgmt-fileshares/assets.json b/sdk/fileshares/azure-mgmt-fileshares/assets.json new file mode 100644 index 000000000000..ce1b98341b63 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/assets.json @@ -0,0 +1,6 @@ +{ + "AssetsRepo": "Azure/azure-sdk-assets", + "AssetsRepoPrefixPath": "python", + "TagPrefix": "python/fileshares/azure-mgmt-fileshares", + "Tag": "python/fileshares/azure-mgmt-fileshares_d5b0dfcc03" +} diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/__init__.py b/sdk/fileshares/azure-mgmt-fileshares/azure/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/__init__.py 
b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/__init__.py new file mode 100644 index 000000000000..d55ccad1f573 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/__init__.py @@ -0,0 +1 @@ +__path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/__init__.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/__init__.py new file mode 100644 index 000000000000..921a61a3dac4 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/__init__.py @@ -0,0 +1,32 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import FileSharesMgmtClient # type: ignore +from ._version import VERSION + +__version__ = VERSION + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "FileSharesMgmtClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_client.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_client.py new file mode 100644 index 000000000000..b0d998c645d0 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_client.py @@ -0,0 +1,167 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Optional, TYPE_CHECKING, cast +from typing_extensions import Self + +from azure.core.pipeline import policies +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.settings import settings +from azure.mgmt.core import ARMPipelineClient +from azure.mgmt.core.policies import ARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints + +from ._configuration import FileSharesMgmtClientConfiguration +from ._utils.serialization import Deserializer, Serializer +from .operations import ( + FileShareSnapshotsOperations, + FileSharesOperations, + InformationalOperationsOperations, + Operations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, +) + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials import TokenCredential + + +class FileSharesMgmtClient: + """Azure File Shares Resource Provider API. 
+ + :ivar file_shares: FileSharesOperations operations + :vartype file_shares: azure.mgmt.fileshares.operations.FileSharesOperations + :ivar file_share_snapshots: FileShareSnapshotsOperations operations + :vartype file_share_snapshots: azure.mgmt.fileshares.operations.FileShareSnapshotsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.fileshares.operations.Operations + :ivar informational_operations: InformationalOperationsOperations operations + :vartype informational_operations: + azure.mgmt.fileshares.operations.InformationalOperationsOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: + azure.mgmt.fileshares.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.fileshares.operations.PrivateLinkResourcesOperations + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is None. + :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are "2026-06-01". + Default value is "2026-06-01". Note that overriding this default value may result in + unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + _endpoint = "{endpoint}" + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) + self._config = FileSharesMgmtClientConfiguration( + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs + ) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + ARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: ARMPipelineClient = ARMPipelineClient(base_url=cast(str, _endpoint), policies=_policies, **kwargs) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.file_shares = FileSharesOperations(self._client, self._config, self._serialize, self._deserialize) + self.file_share_snapshots = FileShareSnapshotsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.informational_operations = 
InformationalOperationsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> HttpResponse: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.HttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + def close(self) -> None: + self._client.close() + + def __enter__(self) -> Self: + self._client.__enter__() + return self + + def __exit__(self, *exc_details: Any) -> None: + self._client.__exit__(*exc_details) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_configuration.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_configuration.py new file mode 100644 index 000000000000..de5c8f6ae4af --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_configuration.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy + +from ._version import VERSION + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials import TokenCredential + + +class FileSharesMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for FileSharesMgmtClient. + + Note that all parameters used to create this instance are saved as instance + attributes. 
+ + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials.TokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are "2026-06-01". + Default value is "2026-06-01". Note that overriding this default value may result in + unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2026-06-01") + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.base_url = base_url + self.cloud_setting = cloud_setting + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-fileshares/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + 
self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_patch.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_patch.py new file mode 100644 index 000000000000..ea765788358a --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_patch.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/__init__.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/__init__.py new file mode 100644 index 000000000000..8026245c2abc --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/__init__.py @@ -0,0 +1,6 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/model_base.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/model_base.py new file mode 100644 index 000000000000..db24930fdca9 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/model_base.py @@ -0,0 +1,1441 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=protected-access, broad-except + +import copy +import calendar +import decimal +import functools +import sys +import logging +import base64 +import re +import typing +import enum +import email.utils +from datetime import datetime, date, time, timedelta, timezone +from json import JSONEncoder +import xml.etree.ElementTree as ET +from collections.abc import MutableMapping +from typing_extensions import Self +import isodate +from azure.core.exceptions import DeserializationError +from azure.core import CaseInsensitiveEnumMeta +from azure.core.pipeline import PipelineResponse +from azure.core.serialization import _Null +from azure.core.rest import HttpResponse + +_LOGGER = logging.getLogger(__name__) + +__all__ = ["SdkJSONEncoder", "Model", "rest_field", "rest_discriminator"] + +TZ_UTC = timezone.utc +_T = typing.TypeVar("_T") +_NONE_TYPE = type(None) + + +def _timedelta_as_isostr(td: timedelta) -> str: + """Converts a datetime.timedelta object into an ISO 8601 formatted string, e.g. 
'P4DT12H30M05S' + + Function adapted from the Tin Can Python project: https://github.com/RusticiSoftware/TinCanPython + + :param timedelta td: The timedelta to convert + :rtype: str + :return: ISO8601 version of this timedelta + """ + + # Split seconds to larger units + seconds = td.total_seconds() + minutes, seconds = divmod(seconds, 60) + hours, minutes = divmod(minutes, 60) + days, hours = divmod(hours, 24) + + days, hours, minutes = list(map(int, (days, hours, minutes))) + seconds = round(seconds, 6) + + # Build date + date_str = "" + if days: + date_str = "%sD" % days + + if hours or minutes or seconds: + # Build time + time_str = "T" + + # Hours + bigger_exists = date_str or hours + if bigger_exists: + time_str += "{:02}H".format(hours) + + # Minutes + bigger_exists = bigger_exists or minutes + if bigger_exists: + time_str += "{:02}M".format(minutes) + + # Seconds + try: + if seconds.is_integer(): + seconds_string = "{:02}".format(int(seconds)) + else: + # 9 chars long w/ leading 0, 6 digits after decimal + seconds_string = "%09.6f" % seconds + # Remove trailing zeros + seconds_string = seconds_string.rstrip("0") + except AttributeError: # int.is_integer() raises + seconds_string = "{:02}".format(seconds) + + time_str += "{}S".format(seconds_string) + else: + time_str = "" + + return "P" + date_str + time_str + + +def _serialize_bytes(o, format: typing.Optional[str] = None) -> str: + encoded = base64.b64encode(o).decode() + if format == "base64url": + return encoded.strip("=").replace("+", "-").replace("/", "_") + return encoded + + +def _serialize_datetime(o, format: typing.Optional[str] = None): + if hasattr(o, "year") and hasattr(o, "hour"): + if format == "rfc7231": + return email.utils.format_datetime(o, usegmt=True) + if format == "unix-timestamp": + return int(calendar.timegm(o.utctimetuple())) + + # astimezone() fails for naive times in Python 2.7, so make make sure o is aware (tzinfo is set) + if not o.tzinfo: + iso_formatted = 
o.replace(tzinfo=TZ_UTC).isoformat() + else: + iso_formatted = o.astimezone(TZ_UTC).isoformat() + # Replace the trailing "+00:00" UTC offset with "Z" (RFC 3339: https://www.ietf.org/rfc/rfc3339.txt) + return iso_formatted.replace("+00:00", "Z") + # Next try datetime.date or datetime.time + return o.isoformat() + + +def _is_readonly(p): + try: + return p._visibility == ["read"] + except AttributeError: + return False + + +class SdkJSONEncoder(JSONEncoder): + """A JSON encoder that's capable of serializing datetime objects and bytes.""" + + def __init__(self, *args, exclude_readonly: bool = False, format: typing.Optional[str] = None, **kwargs): + super().__init__(*args, **kwargs) + self.exclude_readonly = exclude_readonly + self.format = format + + def default(self, o): # pylint: disable=too-many-return-statements + if _is_model(o): + if self.exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + return {k: v for k, v in o.items() if k not in readonly_props} + return dict(o.items()) + try: + return super(SdkJSONEncoder, self).default(o) + except TypeError: + if isinstance(o, _Null): + return None + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, self.format) + try: + # First try datetime.datetime + return _serialize_datetime(o, self.format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return super(SdkJSONEncoder, self).default(o) + + +_VALID_DATE = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" + r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") +_VALID_RFC7231 = re.compile( + r"(Mon|Tue|Wed|Thu|Fri|Sat|Sun),\s\d{2}\s" + r"(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s\d{4}\s\d{2}:\d{2}:\d{2}\sGMT" +) + +_ARRAY_ENCODE_MAPPING = { + "pipeDelimited": "|", + 
"spaceDelimited": " ", + "commaDelimited": ",", + "newlineDelimited": "\n", +} + + +def _deserialize_array_encoded(delimit: str, attr): + if isinstance(attr, str): + if attr == "": + return [] + return attr.split(delimit) + return attr + + +def _deserialize_datetime(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + attr = attr.upper() + match = _VALID_DATE.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + return date_obj # type: ignore[no-any-return] + + +def _deserialize_datetime_rfc7231(attr: typing.Union[str, datetime]) -> datetime: + """Deserialize RFC7231 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + match = _VALID_RFC7231.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + return email.utils.parsedate_to_datetime(attr) + + +def _deserialize_datetime_unix_timestamp(attr: typing.Union[float, datetime]) -> datetime: + """Deserialize unix timestamp into Datetime object. + + :param str attr: response string to be deserialized. 
+ :rtype: ~datetime.datetime + :returns: The datetime object from that input + """ + if isinstance(attr, datetime): + # i'm already deserialized + return attr + return datetime.fromtimestamp(attr, TZ_UTC) + + +def _deserialize_date(attr: typing.Union[str, date]) -> date: + """Deserialize ISO-8601 formatted string into Date object. + :param str attr: response string to be deserialized. + :rtype: date + :returns: The date object from that input + """ + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + if isinstance(attr, date): + return attr + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) # type: ignore + + +def _deserialize_time(attr: typing.Union[str, time]) -> time: + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :returns: The time object from that input + """ + if isinstance(attr, time): + return attr + return isodate.parse_time(attr) # type: ignore[no-any-return] + + +def _deserialize_bytes(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + return bytes(base64.b64decode(attr)) + + +def _deserialize_bytes_base64(attr): + if isinstance(attr, (bytes, bytearray)): + return attr + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return bytes(base64.b64decode(encoded)) + + +def _deserialize_duration(attr): + if isinstance(attr, timedelta): + return attr + return isodate.parse_duration(attr) + + +def _deserialize_decimal(attr): + if isinstance(attr, decimal.Decimal): + return attr + return decimal.Decimal(str(attr)) + + +def _deserialize_int_as_str(attr): + if isinstance(attr, int): + return attr + return int(attr) + + +_DESERIALIZE_MAPPING = { + datetime: _deserialize_datetime, + date: _deserialize_date, + time: _deserialize_time, + bytes: _deserialize_bytes, + bytearray: _deserialize_bytes, + 
timedelta: _deserialize_duration, + typing.Any: lambda x: x, + decimal.Decimal: _deserialize_decimal, +} + +_DESERIALIZE_MAPPING_WITHFORMAT = { + "rfc3339": _deserialize_datetime, + "rfc7231": _deserialize_datetime_rfc7231, + "unix-timestamp": _deserialize_datetime_unix_timestamp, + "base64": _deserialize_bytes, + "base64url": _deserialize_bytes_base64, +} + + +def get_deserializer(annotation: typing.Any, rf: typing.Optional["_RestField"] = None): + if annotation is int and rf and rf._format == "str": + return _deserialize_int_as_str + if annotation is str and rf and rf._format in _ARRAY_ENCODE_MAPPING: + return functools.partial(_deserialize_array_encoded, _ARRAY_ENCODE_MAPPING[rf._format]) + if rf and rf._format: + return _DESERIALIZE_MAPPING_WITHFORMAT.get(rf._format) + return _DESERIALIZE_MAPPING.get(annotation) # pyright: ignore + + +def _get_type_alias_type(module_name: str, alias_name: str): + types = { + k: v + for k, v in sys.modules[module_name].__dict__.items() + if isinstance(v, typing._GenericAlias) # type: ignore + } + if alias_name not in types: + return alias_name + return types[alias_name] + + +def _get_model(module_name: str, model_name: str): + models = {k: v for k, v in sys.modules[module_name].__dict__.items() if isinstance(v, type)} + module_end = module_name.rsplit(".", 1)[0] + models.update({k: v for k, v in sys.modules[module_end].__dict__.items() if isinstance(v, type)}) + if isinstance(model_name, str): + model_name = model_name.split(".")[-1] + if model_name not in models: + return model_name + return models[model_name] + + +_UNSET = object() + + +class _MyMutableMapping(MutableMapping[str, typing.Any]): + def __init__(self, data: dict[str, typing.Any]) -> None: + self._data = data + + def __contains__(self, key: typing.Any) -> bool: + return key in self._data + + def __getitem__(self, key: str) -> typing.Any: + # If this key has been deserialized (for mutable types), we need to handle serialization + if hasattr(self, 
"_attr_to_rest_field"): + cache_attr = f"_deserialized_{key}" + if hasattr(self, cache_attr): + rf = _get_rest_field(getattr(self, "_attr_to_rest_field"), key) + if rf: + value = self._data.get(key) + if isinstance(value, (dict, list, set)): + # For mutable types, serialize and return + # But also update _data with serialized form and clear flag + # so mutations via this returned value affect _data + serialized = _serialize(value, rf._format) + # If serialized form is same type (no transformation needed), + # return _data directly so mutations work + if isinstance(serialized, type(value)) and serialized == value: + return self._data.get(key) + # Otherwise return serialized copy and clear flag + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + # Store serialized form back + self._data[key] = serialized + return serialized + return self._data.__getitem__(key) + + def __setitem__(self, key: str, value: typing.Any) -> None: + # Clear any cached deserialized value when setting through dictionary access + cache_attr = f"_deserialized_{key}" + try: + object.__delattr__(self, cache_attr) + except AttributeError: + pass + self._data.__setitem__(key, value) + + def __delitem__(self, key: str) -> None: + self._data.__delitem__(key) + + def __iter__(self) -> typing.Iterator[typing.Any]: + return self._data.__iter__() + + def __len__(self) -> int: + return self._data.__len__() + + def __ne__(self, other: typing.Any) -> bool: + return not self.__eq__(other) + + def keys(self) -> typing.KeysView[str]: + """ + :returns: a set-like object providing a view on D's keys + :rtype: ~typing.KeysView + """ + return self._data.keys() + + def values(self) -> typing.ValuesView[typing.Any]: + """ + :returns: an object providing a view on D's values + :rtype: ~typing.ValuesView + """ + return self._data.values() + + def items(self) -> typing.ItemsView[str, typing.Any]: + """ + :returns: set-like object providing a view on D's items + :rtype: ~typing.ItemsView + """ 
+ return self._data.items() + + def get(self, key: str, default: typing.Any = None) -> typing.Any: + """ + Get the value for key if key is in the dictionary, else default. + :param str key: The key to look up. + :param any default: The value to return if key is not in the dictionary. Defaults to None + :returns: D[k] if k in D, else d. + :rtype: any + """ + try: + return self[key] + except KeyError: + return default + + @typing.overload + def pop(self, key: str) -> typing.Any: ... # pylint: disable=arguments-differ + + @typing.overload + def pop(self, key: str, default: _T) -> _T: ... # pylint: disable=signature-differs + + @typing.overload + def pop(self, key: str, default: typing.Any) -> typing.Any: ... # pylint: disable=signature-differs + + def pop(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Removes specified key and return the corresponding value. + :param str key: The key to pop. + :param any default: The value to return if key is not in the dictionary + :returns: The value corresponding to the key. + :rtype: any + :raises KeyError: If key is not found and default is not given. + """ + if default is _UNSET: + return self._data.pop(key) + return self._data.pop(key, default) + + def popitem(self) -> tuple[str, typing.Any]: + """ + Removes and returns some (key, value) pair + :returns: The (key, value) pair. + :rtype: tuple + :raises KeyError: if D is empty. + """ + return self._data.popitem() + + def clear(self) -> None: + """ + Remove all items from D. + """ + self._data.clear() + + def update(self, *args: typing.Any, **kwargs: typing.Any) -> None: # pylint: disable=arguments-differ + """ + Updates D from mapping/iterable E and F. + :param any args: Either a mapping object or an iterable of key-value pairs. + """ + self._data.update(*args, **kwargs) + + @typing.overload + def setdefault(self, key: str, default: None = None) -> None: ... + + @typing.overload + def setdefault(self, key: str, default: typing.Any) -> typing.Any: ... 
# pylint: disable=signature-differs + + def setdefault(self, key: str, default: typing.Any = _UNSET) -> typing.Any: + """ + Same as calling D.get(k, d), and setting D[k]=d if k not found + :param str key: The key to look up. + :param any default: The value to set if key is not in the dictionary + :returns: D[k] if k in D, else d. + :rtype: any + """ + if default is _UNSET: + return self._data.setdefault(key) + return self._data.setdefault(key, default) + + def __eq__(self, other: typing.Any) -> bool: + if isinstance(other, _MyMutableMapping): + return self._data == other._data + try: + other_model = self.__class__(other) + except Exception: + return False + return self._data == other_model._data + + def __repr__(self) -> str: + return str(self._data) + + +def _is_model(obj: typing.Any) -> bool: + return getattr(obj, "_is_model", False) + + +def _serialize(o, format: typing.Optional[str] = None): # pylint: disable=too-many-return-statements + if isinstance(o, list): + if format in _ARRAY_ENCODE_MAPPING and all(isinstance(x, str) for x in o): + return _ARRAY_ENCODE_MAPPING[format].join(o) + return [_serialize(x, format) for x in o] + if isinstance(o, dict): + return {k: _serialize(v, format) for k, v in o.items()} + if isinstance(o, set): + return {_serialize(x, format) for x in o} + if isinstance(o, tuple): + return tuple(_serialize(x, format) for x in o) + if isinstance(o, (bytes, bytearray)): + return _serialize_bytes(o, format) + if isinstance(o, decimal.Decimal): + return float(o) + if isinstance(o, enum.Enum): + return o.value + if isinstance(o, int): + if format == "str": + return str(o) + return o + try: + # First try datetime.datetime + return _serialize_datetime(o, format) + except AttributeError: + pass + # Last, try datetime.timedelta + try: + return _timedelta_as_isostr(o) + except AttributeError: + # This will be raised when it hits value.total_seconds in the method above + pass + return o + + +def _get_rest_field(attr_to_rest_field: dict[str, 
"_RestField"], rest_name: str) -> typing.Optional["_RestField"]: + try: + return next(rf for rf in attr_to_rest_field.values() if rf._rest_name == rest_name) + except StopIteration: + return None + + +def _create_value(rf: typing.Optional["_RestField"], value: typing.Any) -> typing.Any: + if not rf: + return _serialize(value, None) + if rf._is_multipart_file_input: + return value + if rf._is_model: + return _deserialize(rf._type, value) + if isinstance(value, ET.Element): + value = _deserialize(rf._type, value) + return _serialize(value, rf._format) + + +class Model(_MyMutableMapping): + _is_model = True + # label whether current class's _attr_to_rest_field has been calculated + # could not see _attr_to_rest_field directly because subclass inherits it from parent class + _calculated: set[str] = set() + + def __init__(self, *args: typing.Any, **kwargs: typing.Any) -> None: + class_name = self.__class__.__name__ + if len(args) > 1: + raise TypeError(f"{class_name}.__init__() takes 2 positional arguments but {len(args) + 1} were given") + dict_to_pass = { + rest_field._rest_name: rest_field._default + for rest_field in self._attr_to_rest_field.values() + if rest_field._default is not _UNSET + } + if args: + if isinstance(args[0], ET.Element): + dict_to_pass.update(self._init_from_xml(args[0])) + else: + dict_to_pass.update( + {k: _create_value(_get_rest_field(self._attr_to_rest_field, k), v) for k, v in args[0].items()} + ) + else: + non_attr_kwargs = [k for k in kwargs if k not in self._attr_to_rest_field] + if non_attr_kwargs: + # actual type errors only throw the first wrong keyword arg they see, so following that. 
+ raise TypeError(f"{class_name}.__init__() got an unexpected keyword argument '{non_attr_kwargs[0]}'") + dict_to_pass.update( + { + self._attr_to_rest_field[k]._rest_name: _create_value(self._attr_to_rest_field[k], v) + for k, v in kwargs.items() + if v is not None + } + ) + super().__init__(dict_to_pass) + + def _init_from_xml(self, element: ET.Element) -> dict[str, typing.Any]: + """Deserialize an XML element into a dict mapping rest field names to values. + + :param ET.Element element: The XML element to deserialize from. + :returns: A dictionary of rest_name to deserialized value pairs. + :rtype: dict + """ + result: dict[str, typing.Any] = {} + model_meta = getattr(self, "_xml", {}) + existed_attr_keys: list[str] = [] + + for rf in self._attr_to_rest_field.values(): + prop_meta = getattr(rf, "_xml", {}) + xml_name = prop_meta.get("name", rf._rest_name) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + # attribute + if prop_meta.get("attribute", False) and element.get(xml_name) is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, element.get(xml_name)) + continue + + # unwrapped element is array + if prop_meta.get("unwrapped", False): + # unwrapped array could either use prop items meta/prop meta + _items_name = prop_meta.get("itemsName") + if _items_name: + xml_name = _items_name + _items_ns = prop_meta.get("itemsNs") + if _items_ns is not None: + xml_ns = _items_ns + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + items = element.findall(xml_name) # pyright: ignore + if len(items) > 0: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, items) + elif not rf._is_optional: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = [] + continue + + # text element is primitive type + if prop_meta.get("text", False): + if element.text is not None: + result[rf._rest_name] = _deserialize(rf._type, element.text) + 
continue + + # wrapped element could be normal property or array, it should only have one element + item = element.find(xml_name) + if item is not None: + existed_attr_keys.append(xml_name) + result[rf._rest_name] = _deserialize(rf._type, item) + + # rest thing is additional properties + for e in element: + if e.tag not in existed_attr_keys: + result[e.tag] = _convert_element(e) + + return result + + def copy(self) -> "Model": + return Model(self.__dict__) + + def __new__(cls, *args: typing.Any, **kwargs: typing.Any) -> Self: + if f"{cls.__module__}.{cls.__qualname__}" not in cls._calculated: + # we know the last nine classes in mro are going to be 'Model', '_MyMutableMapping', 'MutableMapping', + # 'Mapping', 'Collection', 'Sized', 'Iterable', 'Container' and 'object' + mros = cls.__mro__[:-9][::-1] # ignore parents, and reverse the mro order + attr_to_rest_field: dict[str, _RestField] = { # map attribute name to rest_field property + k: v for mro_class in mros for k, v in mro_class.__dict__.items() if k[0] != "_" and hasattr(v, "_type") + } + annotations = { + k: v + for mro_class in mros + if hasattr(mro_class, "__annotations__") + for k, v in mro_class.__annotations__.items() + } + for attr, rf in attr_to_rest_field.items(): + rf._module = cls.__module__ + if not rf._type: + rf._type = rf._get_deserialize_callable_from_annotation(annotations.get(attr, None)) + if not rf._rest_name_input: + rf._rest_name_input = attr + cls._attr_to_rest_field: dict[str, _RestField] = dict(attr_to_rest_field.items()) + cls._calculated.add(f"{cls.__module__}.{cls.__qualname__}") + + return super().__new__(cls) + + def __init_subclass__(cls, discriminator: typing.Optional[str] = None) -> None: + for base in cls.__bases__: + if hasattr(base, "__mapping__"): + base.__mapping__[discriminator or cls.__name__] = cls # type: ignore + + @classmethod + def _get_discriminator(cls, exist_discriminators) -> typing.Optional["_RestField"]: + for v in cls.__dict__.values(): + if isinstance(v, 
_RestField) and v._is_discriminator and v._rest_name not in exist_discriminators: + return v + return None + + @classmethod + def _deserialize(cls, data, exist_discriminators): + if not hasattr(cls, "__mapping__"): + return cls(data) + discriminator = cls._get_discriminator(exist_discriminators) + if discriminator is None: + return cls(data) + exist_discriminators.append(discriminator._rest_name) + if isinstance(data, ET.Element): + model_meta = getattr(cls, "_xml", {}) + prop_meta = getattr(discriminator, "_xml", {}) + xml_name = prop_meta.get("name", discriminator._rest_name) + xml_ns = _resolve_xml_ns(prop_meta, model_meta) + if xml_ns: + xml_name = "{" + xml_ns + "}" + xml_name + + if data.get(xml_name) is not None: + discriminator_value = data.get(xml_name) + else: + discriminator_value = data.find(xml_name).text # pyright: ignore + else: + discriminator_value = data.get(discriminator._rest_name) + mapped_cls = cls.__mapping__.get(discriminator_value, cls) # pyright: ignore # pylint: disable=no-member + return mapped_cls._deserialize(data, exist_discriminators) + + def as_dict(self, *, exclude_readonly: bool = False) -> dict[str, typing.Any]: + """Return a dict that can be turned into json using json.dump. + + :keyword bool exclude_readonly: Whether to remove the readonly properties. 
+ :returns: A dict JSON compatible object + :rtype: dict + """ + + result = {} + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in self._attr_to_rest_field.values() if _is_readonly(p)] + for k, v in self.items(): + if exclude_readonly and k in readonly_props: # pyright: ignore + continue + is_multipart_file_input = False + try: + is_multipart_file_input = next( + rf for rf in self._attr_to_rest_field.values() if rf._rest_name == k + )._is_multipart_file_input + except StopIteration: + pass + result[k] = v if is_multipart_file_input else Model._as_dict_value(v, exclude_readonly=exclude_readonly) + return result + + @staticmethod + def _as_dict_value(v: typing.Any, exclude_readonly: bool = False) -> typing.Any: + if v is None or isinstance(v, _Null): + return None + if isinstance(v, (list, tuple, set)): + return type(v)(Model._as_dict_value(x, exclude_readonly=exclude_readonly) for x in v) + if isinstance(v, dict): + return {dk: Model._as_dict_value(dv, exclude_readonly=exclude_readonly) for dk, dv in v.items()} + return v.as_dict(exclude_readonly=exclude_readonly) if hasattr(v, "as_dict") else v + + +def _deserialize_model(model_deserializer: typing.Optional[typing.Callable], obj): + if _is_model(obj): + return obj + return _deserialize(model_deserializer, obj) + + +def _deserialize_with_optional(if_obj_deserializer: typing.Optional[typing.Callable], obj): + if obj is None: + return obj + return _deserialize_with_callable(if_obj_deserializer, obj) + + +def _deserialize_with_union(deserializers, obj): + for deserializer in deserializers: + try: + return _deserialize(deserializer, obj) + except DeserializationError: + pass + raise DeserializationError() + + +def _deserialize_dict( + value_deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj: dict[typing.Any, typing.Any], +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = {child.tag: child for child in obj} + return {k: 
_deserialize(value_deserializer, v, module) for k, v in obj.items()} + + +def _deserialize_multiple_sequence( + entry_deserializers: list[typing.Optional[typing.Callable]], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + return type(obj)(_deserialize(deserializer, entry, module) for entry, deserializer in zip(obj, entry_deserializers)) + + +def _is_array_encoded_deserializer(deserializer: functools.partial) -> bool: + return ( + isinstance(deserializer, functools.partial) + and isinstance(deserializer.args[0], functools.partial) + and deserializer.args[0].func == _deserialize_array_encoded # pylint: disable=comparison-with-callable + ) + + +def _deserialize_sequence( + deserializer: typing.Optional[typing.Callable], + module: typing.Optional[str], + obj, +): + if obj is None: + return obj + if isinstance(obj, ET.Element): + obj = list(obj) + + # encoded string may be deserialized to sequence + if isinstance(obj, str) and isinstance(deserializer, functools.partial): + # for list[str] + if _is_array_encoded_deserializer(deserializer): + return deserializer(obj) + + # for list[Union[...]] + if isinstance(deserializer.args[0], list): + for sub_deserializer in deserializer.args[0]: + if _is_array_encoded_deserializer(sub_deserializer): + return sub_deserializer(obj) + + return type(obj)(_deserialize(deserializer, entry, module) for entry in obj) + + +def _sorted_annotations(types: list[typing.Any]) -> list[typing.Any]: + return sorted( + types, + key=lambda x: hasattr(x, "__name__") and x.__name__.lower() in ("str", "float", "int", "bool"), + ) + + +def _get_deserialize_callable_from_annotation( # pylint: disable=too-many-return-statements, too-many-statements, too-many-branches + annotation: typing.Any, + module: typing.Optional[str], + rf: typing.Optional["_RestField"] = None, +) -> typing.Optional[typing.Callable[[typing.Any], typing.Any]]: + if not annotation: + return None + + # is it a type alias? 
+ if isinstance(annotation, str): + if module is not None: + annotation = _get_type_alias_type(module, annotation) + + # is it a forward ref / in quotes? + if isinstance(annotation, (str, typing.ForwardRef)): + try: + model_name = annotation.__forward_arg__ # type: ignore + except AttributeError: + model_name = annotation + if module is not None: + annotation = _get_model(module, model_name) # type: ignore + + try: + if module and _is_model(annotation): + if rf: + rf._is_model = True + + return functools.partial(_deserialize_model, annotation) # pyright: ignore + except Exception: + pass + + # is it a literal? + try: + if annotation.__origin__ is typing.Literal: # pyright: ignore + return None + except AttributeError: + pass + + # is it optional? + try: + if any(a is _NONE_TYPE for a in annotation.__args__): # pyright: ignore + if rf: + rf._is_optional = True + if len(annotation.__args__) <= 2: # pyright: ignore + if_obj_deserializer = _get_deserialize_callable_from_annotation( + next(a for a in annotation.__args__ if a is not _NONE_TYPE), module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_with_optional, if_obj_deserializer) + # the type is Optional[Union[...]], we need to remove the None type from the Union + annotation_copy = copy.copy(annotation) + annotation_copy.__args__ = [a for a in annotation_copy.__args__ if a is not _NONE_TYPE] # pyright: ignore + return _get_deserialize_callable_from_annotation(annotation_copy, module, rf) + except AttributeError: + pass + + # is it union? 
+ if getattr(annotation, "__origin__", None) is typing.Union: + # initial ordering is we make `string` the last deserialization option, because it is often them most generic + deserializers = [ + _get_deserialize_callable_from_annotation(arg, module, rf) + for arg in _sorted_annotations(annotation.__args__) # pyright: ignore + ] + + return functools.partial(_deserialize_with_union, deserializers) + + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() == "dict": + value_deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[1], module, rf # pyright: ignore + ) + + return functools.partial( + _deserialize_dict, + value_deserializer, + module, + ) + except (AttributeError, IndexError): + pass + try: + annotation_name = ( + annotation.__name__ if hasattr(annotation, "__name__") else annotation._name # pyright: ignore + ) + if annotation_name.lower() in ["list", "set", "tuple", "sequence"]: + if len(annotation.__args__) > 1: # pyright: ignore + entry_deserializers = [ + _get_deserialize_callable_from_annotation(dt, module, rf) + for dt in annotation.__args__ # pyright: ignore + ] + return functools.partial(_deserialize_multiple_sequence, entry_deserializers, module) + deserializer = _get_deserialize_callable_from_annotation( + annotation.__args__[0], module, rf # pyright: ignore + ) + + return functools.partial(_deserialize_sequence, deserializer, module) + except (TypeError, IndexError, AttributeError, SyntaxError): + pass + + def _deserialize_default( + deserializer, + obj, + ): + if obj is None: + return obj + try: + return _deserialize_with_callable(deserializer, obj) + except Exception: + pass + return obj + + if get_deserializer(annotation, rf): + return functools.partial(_deserialize_default, get_deserializer(annotation, rf)) + + return functools.partial(_deserialize_default, annotation) + + +def _deserialize_with_callable( + 
deserializer: typing.Optional[typing.Callable[[typing.Any], typing.Any]], + value: typing.Any, +): # pylint: disable=too-many-return-statements + try: + if value is None or isinstance(value, _Null): + return None + if isinstance(value, ET.Element): + if deserializer is str: + return value.text or "" + if deserializer is int: + return int(value.text) if value.text else None + if deserializer is float: + return float(value.text) if value.text else None + if deserializer is bool: + return value.text == "true" if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING.values(): + return deserializer(value.text) if value.text else None + if deserializer and deserializer in _DESERIALIZE_MAPPING_WITHFORMAT.values(): + return deserializer(value.text) if value.text else None + if deserializer is None: + return value + if deserializer in [int, float, bool]: + return deserializer(value) + if isinstance(deserializer, CaseInsensitiveEnumMeta): + try: + return deserializer(value.text if isinstance(value, ET.Element) else value) + except ValueError: + # for unknown value, return raw value + return value.text if isinstance(value, ET.Element) else value + if isinstance(deserializer, type) and issubclass(deserializer, Model): + return deserializer._deserialize(value, []) + return typing.cast(typing.Callable[[typing.Any], typing.Any], deserializer)(value) + except Exception as e: + raise DeserializationError() from e + + +def _deserialize( + deserializer: typing.Any, + value: typing.Any, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + if isinstance(value, PipelineResponse): + value = value.http_response.json() + if rf is None and format: + rf = _RestField(format=format) + if not isinstance(deserializer, functools.partial): + deserializer = _get_deserialize_callable_from_annotation(deserializer, module, rf) + return _deserialize_with_callable(deserializer, value) + + 
+def _failsafe_deserialize( + deserializer: typing.Any, + response: HttpResponse, + module: typing.Optional[str] = None, + rf: typing.Optional["_RestField"] = None, + format: typing.Optional[str] = None, +) -> typing.Any: + try: + return _deserialize(deserializer, response.json(), module, rf, format) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +def _failsafe_deserialize_xml( + deserializer: typing.Any, + response: HttpResponse, +) -> typing.Any: + try: + return _deserialize_xml(deserializer, response.text()) + except Exception: # pylint: disable=broad-except + _LOGGER.warning( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + +# pylint: disable=too-many-instance-attributes +class _RestField: + def __init__( + self, + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + is_discriminator: bool = False, + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, + ): + self._type = type + self._rest_name_input = name + self._module: typing.Optional[str] = None + self._is_discriminator = is_discriminator + self._visibility = visibility + self._is_model = False + self._is_optional = False + self._default = default + self._format = format + self._is_multipart_file_input = is_multipart_file_input + self._xml = xml if xml is not None else {} + + @property + def _class_type(self) -> typing.Any: + result = getattr(self._type, "args", [None])[0] + # type may be wrapped by nested functools.partial so we need to check for that + if isinstance(result, functools.partial): + return getattr(result, "args", [None])[0] + return result + + 
@property + def _rest_name(self) -> str: + if self._rest_name_input is None: + raise ValueError("Rest name was never set") + return self._rest_name_input + + def __get__(self, obj: Model, type=None): # pylint: disable=redefined-builtin + # by this point, type and rest_name will have a value bc we default + # them in __new__ of the Model class + # Use _data.get() directly to avoid triggering __getitem__ which clears the cache + item = obj._data.get(self._rest_name) + if item is None: + return item + if self._is_model: + return item + + # For mutable types, we want mutations to directly affect _data + # Check if we've already deserialized this value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + # Return the value from _data directly (it's been deserialized in place) + return obj._data.get(self._rest_name) + + deserialized = _deserialize(self._type, _serialize(item, self._format), rf=self) + + # For mutable types, store the deserialized value back in _data + # so mutations directly affect _data + if isinstance(deserialized, (dict, list, set)): + obj._data[self._rest_name] = deserialized + object.__setattr__(obj, cache_attr, True) # Mark as deserialized + return deserialized + + return deserialized + + def __set__(self, obj: Model, value) -> None: + # Clear the cached deserialized object when setting a new value + cache_attr = f"_deserialized_{self._rest_name}" + if hasattr(obj, cache_attr): + object.__delattr__(obj, cache_attr) + + if value is None: + # we want to wipe out entries if users set attr to None + try: + obj.__delitem__(self._rest_name) + except KeyError: + pass + return + if self._is_model: + if not _is_model(value): + value = _deserialize(self._type, value) + obj.__setitem__(self._rest_name, value) + return + obj.__setitem__(self._rest_name, _serialize(value, self._format)) + + def _get_deserialize_callable_from_annotation( + self, annotation: typing.Any + ) -> typing.Optional[typing.Callable[[typing.Any], 
typing.Any]]: + return _get_deserialize_callable_from_annotation(annotation, self._module, self) + + +def rest_field( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + default: typing.Any = _UNSET, + format: typing.Optional[str] = None, + is_multipart_file_input: bool = False, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField( + name=name, + type=type, + visibility=visibility, + default=default, + format=format, + is_multipart_file_input=is_multipart_file_input, + xml=xml, + ) + + +def rest_discriminator( + *, + name: typing.Optional[str] = None, + type: typing.Optional[typing.Callable] = None, # pylint: disable=redefined-builtin + visibility: typing.Optional[list[str]] = None, + xml: typing.Optional[dict[str, typing.Any]] = None, +) -> typing.Any: + return _RestField(name=name, type=type, is_discriminator=True, visibility=visibility, xml=xml) + + +def serialize_xml(model: Model, exclude_readonly: bool = False) -> str: + """Serialize a model to XML. + + :param Model model: The model to serialize. + :param bool exclude_readonly: Whether to exclude readonly properties. + :returns: The XML representation of the model. + :rtype: str + """ + return ET.tostring(_get_element(model, exclude_readonly), encoding="unicode") # type: ignore + + +def _get_xml_ns(meta: dict[str, typing.Any]) -> typing.Optional[str]: + """Return the XML namespace from a metadata dict, checking both 'ns' (old-style) and 'namespace' (DPG) keys. + + :param dict meta: The metadata dictionary to extract namespace from. + :returns: The namespace string if 'ns' or 'namespace' key is present, None otherwise. 
+ :rtype: str or None + """ + ns = meta.get("ns") + if ns is None: + ns = meta.get("namespace") + return ns + + +def _resolve_xml_ns( + prop_meta: dict[str, typing.Any], model_meta: typing.Optional[dict[str, typing.Any]] = None +) -> typing.Optional[str]: + """Resolve XML namespace for a property, falling back to model namespace when appropriate. + + Checks the property metadata first; if no namespace is found and the model does not declare + an explicit prefix, falls back to the model-level namespace. + + :param dict prop_meta: The property metadata dictionary. + :param dict model_meta: The model metadata dictionary, used as fallback. + :returns: The resolved namespace string, or None. + :rtype: str or None + """ + ns = _get_xml_ns(prop_meta) + if ns is None and model_meta is not None and not model_meta.get("prefix"): + ns = _get_xml_ns(model_meta) + return ns + + +def _set_xml_attribute(element: ET.Element, name: str, value: typing.Any, prop_meta: dict[str, typing.Any]) -> None: + """Set an XML attribute on an element, handling namespace prefix registration. + + :param ET.Element element: The element to set the attribute on. + :param str name: The default attribute name (wire name). + :param any value: The attribute value. + :param dict prop_meta: The property metadata dictionary. 
+ """ + xml_name = prop_meta.get("name", name) + _attr_ns = _get_xml_ns(prop_meta) + if _attr_ns: + _attr_prefix = prop_meta.get("prefix") + if _attr_prefix: + _safe_register_namespace(_attr_prefix, _attr_ns) + xml_name = "{" + _attr_ns + "}" + xml_name + element.set(xml_name, _get_primitive_type_value(value)) + + +def _get_element( + o: typing.Any, + exclude_readonly: bool = False, + parent_meta: typing.Optional[dict[str, typing.Any]] = None, + wrapped_element: typing.Optional[ET.Element] = None, +) -> typing.Union[ET.Element, list[ET.Element]]: + if _is_model(o): + model_meta = getattr(o, "_xml", {}) + + # if prop is a model, then use the prop element directly, else generate a wrapper of model + if wrapped_element is None: + # When serializing as an array item (parent_meta is set), check if the parent has an + # explicit itemsName. This ensures correct element names for unwrapped arrays (where + # the element tag is the property/items name, not the model type name). + _items_name = parent_meta.get("itemsName") if parent_meta is not None else None + element_name = _items_name if _items_name else (model_meta.get("name") or o.__class__.__name__) + _model_ns = _get_xml_ns(model_meta) + wrapped_element = _create_xml_element( + element_name, + model_meta.get("prefix"), + _model_ns, + ) + + readonly_props = [] + if exclude_readonly: + readonly_props = [p._rest_name for p in o._attr_to_rest_field.values() if _is_readonly(p)] + + for k, v in o.items(): + # do not serialize readonly properties + if exclude_readonly and k in readonly_props: + continue + + prop_rest_field = _get_rest_field(o._attr_to_rest_field, k) + if prop_rest_field: + prop_meta = getattr(prop_rest_field, "_xml").copy() + # use the wire name as xml name if no specific name is set + if prop_meta.get("name") is None: + prop_meta["name"] = k + else: + # additional properties will not have rest field, use the wire name as xml name + prop_meta = {"name": k} + + # Propagate model namespace to properties only 
for old-style "ns"-keyed models. + # DPG-generated models use the "namespace" key and explicitly declare namespace on + # each property that needs it, so propagation is intentionally skipped for them. + if prop_meta.get("ns") is None and model_meta.get("ns"): + prop_meta["ns"] = model_meta.get("ns") + prop_meta["prefix"] = model_meta.get("prefix") + + if prop_meta.get("unwrapped", False): + # unwrapped could only set on array + wrapped_element.extend(_get_element(v, exclude_readonly, prop_meta)) + elif prop_meta.get("text", False): + # text could only set on primitive type + wrapped_element.text = _get_primitive_type_value(v) + elif prop_meta.get("attribute", False): + _set_xml_attribute(wrapped_element, k, v, prop_meta) + else: + # other wrapped prop element + wrapped_element.append(_get_wrapped_element(v, exclude_readonly, prop_meta)) + return wrapped_element + if isinstance(o, list): + return [_get_element(x, exclude_readonly, parent_meta) for x in o] # type: ignore + if isinstance(o, dict): + result = [] + _dict_ns = _get_xml_ns(parent_meta) if parent_meta else None + for k, v in o.items(): + result.append( + _get_wrapped_element( + v, + exclude_readonly, + { + "name": k, + "ns": _dict_ns, + "prefix": parent_meta.get("prefix") if parent_meta else None, + }, + ) + ) + return result + + # primitive case need to create element based on parent_meta + if parent_meta: + _items_ns = parent_meta.get("itemsNs") + if _items_ns is None: + _items_ns = _get_xml_ns(parent_meta) + return _get_wrapped_element( + o, + exclude_readonly, + { + "name": parent_meta.get("itemsName", parent_meta.get("name")), + "prefix": parent_meta.get("itemsPrefix", parent_meta.get("prefix")), + "ns": _items_ns, + }, + ) + + raise ValueError("Could not serialize value into xml: " + o) + + +def _get_wrapped_element( + v: typing.Any, + exclude_readonly: bool, + meta: typing.Optional[dict[str, typing.Any]], +) -> ET.Element: + _meta_ns = _get_xml_ns(meta) if meta else None + wrapped_element = 
_create_xml_element( + meta.get("name") if meta else None, meta.get("prefix") if meta else None, _meta_ns + ) + if isinstance(v, (dict, list)): + wrapped_element.extend(_get_element(v, exclude_readonly, meta)) + elif _is_model(v): + _get_element(v, exclude_readonly, meta, wrapped_element) + else: + wrapped_element.text = _get_primitive_type_value(v) + return wrapped_element # type: ignore[no-any-return] + + +def _get_primitive_type_value(v) -> str: + if v is True: + return "true" + if v is False: + return "false" + if isinstance(v, _Null): + return "" + return str(v) + + +def _safe_register_namespace(prefix: str, ns: str) -> None: + """Register an XML namespace prefix, handling reserved prefix patterns. + + Some prefixes (e.g. 'ns2') match Python's reserved 'ns\\d+' pattern used for + auto-generated prefixes, causing register_namespace to raise ValueError. + Falls back to directly registering in the internal namespace map. + + :param str prefix: The namespace prefix to register. + :param str ns: The namespace URI. 
+ """ + try: + ET.register_namespace(prefix, ns) + except ValueError: + _ns_map = getattr(ET, "_namespace_map", None) + if _ns_map is not None: + _ns_map[ns] = prefix + + +def _create_xml_element( + tag: typing.Any, prefix: typing.Optional[str] = None, ns: typing.Optional[str] = None +) -> ET.Element: + if prefix and ns: + _safe_register_namespace(prefix, ns) + if ns: + return ET.Element("{" + ns + "}" + tag) + return ET.Element(tag) + + +def _deserialize_xml( + deserializer: typing.Any, + value: str, +) -> typing.Any: + element = ET.fromstring(value) # nosec + return _deserialize(deserializer, element) + + +def _convert_element(e: ET.Element): + # dict case + if len(e.attrib) > 0 or len({child.tag for child in e}) > 1: + dict_result: dict[str, typing.Any] = {} + for child in e: + if dict_result.get(child.tag) is not None: + if isinstance(dict_result[child.tag], list): + dict_result[child.tag].append(_convert_element(child)) + else: + dict_result[child.tag] = [dict_result[child.tag], _convert_element(child)] + else: + dict_result[child.tag] = _convert_element(child) + dict_result.update(e.attrib) + return dict_result + # array case + if len(e) > 0: + array_result: list[typing.Any] = [] + for child in e: + array_result.append(_convert_element(child)) + return array_result + # primitive case + return e.text diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/serialization.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/serialization.py new file mode 100644 index 000000000000..81ec1de5922b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_utils/serialization.py @@ -0,0 +1,2041 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +# pyright: reportUnnecessaryTypeIgnoreComment=false + +from base64 import b64decode, b64encode +import calendar +import datetime +import decimal +import email +from enum import Enum +import json +import logging +import re +import sys +import codecs +from typing import ( + Any, + cast, + Optional, + Union, + AnyStr, + IO, + Mapping, + Callable, + MutableMapping, +) + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote +import xml.etree.ElementTree as ET + +import isodate # type: ignore +from typing_extensions import Self + +from azure.core.exceptions import DeserializationError, SerializationError +from azure.core.serialization import NULL as CoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. + + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + :return: The deserialized data. 
+ :rtype: object + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) from err + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError as err: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise DeserializationError("XML is invalid") from err + elif content_type.startswith("text/"): + return data_as_str + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. 
+ + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + + :param bytes body_bytes: The body of the response. + :param dict headers: The headers of the response. + :returns: The deserialized data. + :rtype: object + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... + else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + +TZ_UTC = datetime.timezone.utc + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Optional[dict[str, Any]] = {} + for k in kwargs: # pylint: disable=consider-using-dict-items + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. + + :param object other: The object to compare + :returns: True if objects are equal + :rtype: bool + """ + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes. 
+ + :param object other: The object to compare + :returns: True if objects are not equal + :rtype: bool + """ + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node. + + :returns: The XML node + :rtype: xml.etree.ElementTree.Element + """ + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to server from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, keep_readonly=keep_readonly, **kwargs + ) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. 
Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param bool keep_readonly: If you want to serialize the readonly attributes + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize( # type: ignore # pylint: disable=protected-access + self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs + ) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: # pylint: disable=broad-exception-caught + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls, data: Any, content_type: Optional[str] = None) -> Self: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def from_dict( + cls, + data: Any, + key_extractors: Optional[Callable[[str, dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> Self: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param function key_extractors: A key extractor function. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises DeserializationError: if something went wrong + :rtype: Self + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) # type: ignore + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result |= objects[valuetype]._flatten_subtype(key, objects) # pylint: disable=protected-access + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. 
+ + :param dict response: The initial data + :param dict objects: The class objects + :returns: The class to be used + :rtype: class + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.get(rest_api_response_key, None) or response.get(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + :returns: The decoded key + :rtype: str + """ + return key.replace("\\.", ".") + + +class Serializer: # pylint: disable=too-many-public-methods + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize( # pylint: disable=too-many-nested-blocks, too-many-branches, too-many-statements, too-many-locals + self, 
target_obj, data_type=None, **kwargs + ): + """Serialize data into a string according to type. + + :param object target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises SerializationError: if serialization fails. + :returns: The serialized data. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() # pylint: disable=protected-access + try: + attributes = target_obj._attribute_map # pylint: disable=protected-access + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get( # pylint: disable=protected-access + attr_name, {} + ).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized |= target_obj.additional_properties + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, + # we MUST replace the tag with the local tag. But keeping the namespaces. 
+ if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise SerializationError(msg) from err + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises SerializationError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized request body + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. + deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) # pylint: disable=protected-access + except DeserializationError as err: + raise SerializationError("Unable to build a model: " + str(err)) from err + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param str name: The name of the URL path parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :returns: The serialized URL path + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + output = output.replace("{", quote("{")).replace("}", quote("}")) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param str name: The name of the query parameter. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, list + :raises TypeError: if serialization fails. + :raises ValueError: if data is None + :returns: The serialized query parameter + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param str name: The name of the header. + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises TypeError: if serialization fails. 
+ :raises ValueError: if data is None + :returns: The serialized header + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError as exc: + raise TypeError("{} must be type {}.".format(name, data_type)) from exc + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param object data: The data to be serialized. + :param str data_type: The type to be serialized from. + :raises AttributeError: if required data is None. + :raises ValueError: if data is None + :raises SerializationError: if serialization fails. + :returns: The serialized data. + :rtype: str, int, float, bool, dict, list + """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is CoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + if data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, cast(type, data.__class__)) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." 
+ raise SerializationError(msg.format(data, data_type)) from err + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): # pylint: disable=inconsistent-return-statements + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param obj data: Object to be serialized. + :param str data_type: Type of object in the iterable. + :rtype: str, int, float, bool + :return: serialized object + :raises TypeError: raise if data_type is not one of str, int, float, bool. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + if data_type == "int": + return int(data) + if data_type == "float": + return float(data) + if data_type == "bool": + return bool(data) + raise TypeError("Unknown basic data type: {}".format(data_type)) + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param str data: Object to be serialized. 
+ :rtype: str + :return: serialized object + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list data: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + Defaults to False. + :rtype: list, str + :return: serialized iterable + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + 
node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :rtype: dict + :return: serialized dictionary + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
+ :rtype: dict or str + :return: serialized object + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + if obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError as exc: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) from exc + + @staticmethod + def serialize_bytearray(attr, **kwargs): # pylint: disable=unused-argument + """Serialize bytearray into base-64 string. 
+ + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): # pylint: disable=unused-argument + """Serialize str into base-64 string. + + :param str attr: Object to be serialized. + :rtype: str + :return: serialized base64 + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Decimal object to float. + + :param decimal attr: Object to be serialized. + :rtype: float + :return: serialized decimal + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): # pylint: disable=unused-argument + """Serialize long (Py2) or int (Py3). + + :param int attr: Object to be serialized. + :rtype: int/long + :return: serialized long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + :return: serialized date + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + :return: serialized time + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): # pylint: disable=unused-argument + """Serialize TimeDelta object into ISO-8601 formatted string. 
+ + :param TimeDelta attr: Object to be serialized. + :rtype: str + :return: serialized duration + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises TypeError: if format invalid. + :return: serialized rfc + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError as exc: + raise TypeError("RFC1123 object must be valid Datetime object.") from exc + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises SerializationError: if format invalid. + :return: serialized iso + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." 
+ raise SerializationError(msg) from err + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise TypeError(msg) from err + + @staticmethod + def serialize_unix(attr, **kwargs): # pylint: disable=unused-argument + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises SerializationError: if format invalid + :return: serialied unix + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError as exc: + raise TypeError("Unix time object must be valid Datetime object.") from exc + + +def rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(list[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor( # pylint: disable=unused-argument, inconsistent-return-statements + attr, attr_desc, data +): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. + + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): # pylint: disable=unused-argument + """Extract the attribute in "data" based on the last part of the JSON path key. 
+ + This is the case insensitive version of "last_rest_key_extractor" + :param str attr: The attribute to extract + :param dict attr_desc: The attribute description + :param dict data: The data to extract from + :rtype: object + :returns: The extracted attribute + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): # pylint: disable=unused-argument,too-many-return-statements + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an 
attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: + if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer: + """Response object model deserializer. 
+ + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, type]] = None) -> None: + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: dict[str, type] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. 
+ :param str content_type: Swagger "produces" if available. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): # pylint: disable=inconsistent-return-statements + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. + :rtype: object + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): # pylint: disable=protected-access + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, str): + return self.deserialize_data(data, response) + if isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None or data is CoreNull: + return data + try: + attributes = response._attribute_map # type: ignore # pylint: disable=protected-access + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... 
+ if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise DeserializationError(msg) from err + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :return: The classified target object and its class name. + :rtype: tuple + """ + if target is None: + return None, None + + if isinstance(target, str): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) # type: ignore # pylint: disable=protected-access + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + :return: Deserialized object. + :rtype: object + """ + try: + return self(target_obj, data, content_type=content_type) + except: # pylint: disable=bare-except + _LOGGER.debug( + "Ran into a deserialization error. 
Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param obj raw_data: Data to be processed. + :param str content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + :rtype: object + :return: Unpacked content. + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. + if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (str, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param Response response: The response model class. 
+ :param dict attrs: The deserialized response attributes. + :param dict additional_properties: Additional properties to be set. + :rtype: Response + :return: The instantiated response model. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("readonly") + ] + const = [ + k + for k, v in response._validation.items() # pylint: disable=protected-access # type: ignore + if v.get("constant") + ] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties # type: ignore + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) from err + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) from exp + + def deserialize_data(self, data, data_type): # pylint: disable=too-many-return-statements + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises DeserializationError: if deserialization fails. + :return: Deserialized object. 
+ :rtype: object + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in [ # pylint: disable=unnecessary-lambda-assignment + "object", + "[]", + r"{}", + ] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise DeserializationError(msg) from err + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. + :return: Deserialized iterable. + :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. 
+ :param str dict_type: The object type of the items in the dictionary. + :return: Deserialized dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): # pylint: disable=too-many-return-statements + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :return: Deserialized object. + :rtype: dict + :raises TypeError: if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, str): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): # pylint: disable=too-many-return-statements + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. 
+ + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :return: Deserialized basic type. + :rtype: str, int, float or bool + :raises TypeError: if string format is not valid or data_type is not one of str, int, float, bool. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + if isinstance(attr, str): + if attr.lower() in ["true", "1"]: + return True + if attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + if data_type == "int": + return int(attr) + if data_type == "float": + return float(attr) + raise TypeError("Unknown basic data type: {}".format(data_type)) + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. + :return: Deserialized string. + :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. 
If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :return: Deserialized enum object. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + try: + return list(enum_obj.__members__.values())[data] + except IndexError as exc: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) from exc + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :return: Deserialized bytearray + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :return: Deserialized base64 string + :rtype: bytearray + :raises TypeError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. 
+ :return: Deserialized decimal + :raises DeserializationError: if string format invalid. + :rtype: decimal + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(str(attr)) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise DeserializationError(msg) from err + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :return: Deserialized int + :rtype: long or int + :raises ValueError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :return: Deserialized duration + :rtype: TimeDelta + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise DeserializationError(msg) from err + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :return: Deserialized date + :rtype: Date + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. 
+ return isodate.parse_date(attr, defaultmonth=0, defaultday=0) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :return: Deserialized time + :rtype: datetime.time + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized RFC datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=datetime.timezone(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :return: Deserialized ISO datetime + :rtype: Datetime + :raises DeserializationError: if string format invalid. 
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise DeserializationError(msg) from err + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. + :return: Deserialized datetime + :rtype: Datetime + :raises DeserializationError: if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + attr = int(attr) + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise DeserializationError(msg) from err + return date_obj diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_validation.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_validation.py new file mode 100644 index 000000000000..f5af3a4eb8a2 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_validation.py @@ -0,0 +1,66 @@ +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import functools + + +def api_version_validation(**kwargs): + params_added_on = kwargs.pop("params_added_on", {}) + method_added_on = kwargs.pop("method_added_on", "") + api_versions_list = kwargs.pop("api_versions_list", []) + + def _index_with_default(value: str, default: int = -1) -> int: + """Get the index of value in lst, or return default if not found. + + :param value: The value to search for in the api_versions_list. + :type value: str + :param default: The default value to return if the value is not found. + :type default: int + :return: The index of the value in the list, or the default value if not found. + :rtype: int + """ + try: + return api_versions_list.index(value) + except ValueError: + return default + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + try: + # this assumes the client has an _api_version attribute + client = args[0] + client_api_version = client._config.api_version # pylint: disable=protected-access + except AttributeError: + return func(*args, **kwargs) + + if _index_with_default(method_added_on) > _index_with_default(client_api_version): + raise ValueError( + f"'{func.__name__}' is not available in API version " + f"{client_api_version}. Pass service API version {method_added_on} or newer to your client." + ) + + unsupported = { + parameter: api_version + for api_version, parameters in params_added_on.items() + for parameter in parameters + if parameter in kwargs and _index_with_default(api_version) > _index_with_default(client_api_version) + } + if unsupported: + raise ValueError( + "".join( + [ + f"'{param}' is not available in API version {client_api_version}. 
" + f"Use service API version {version} or newer.\n" + for param, version in unsupported.items() + ] + ) + ) + return func(*args, **kwargs) + + return wrapper + + return decorator diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_version.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_version.py new file mode 100644 index 000000000000..be71c81bd282 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/_version.py @@ -0,0 +1,9 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +VERSION = "1.0.0b1" diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/__init__.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/__init__.py new file mode 100644 index 000000000000..0efe01b94ee9 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/__init__.py @@ -0,0 +1,29 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._client import FileSharesMgmtClient # type: ignore + +try: + from ._patch import __all__ as _patch_all + from ._patch import * +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "FileSharesMgmtClient", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore + +_patch_sdk() diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_client.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_client.py new file mode 100644 index 000000000000..f03f8fbfaa0c --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_client.py @@ -0,0 +1,172 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from copy import deepcopy +from typing import Any, Awaitable, Optional, TYPE_CHECKING, cast +from typing_extensions import Self + +from azure.core.pipeline import policies +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.settings import settings +from azure.mgmt.core import AsyncARMPipelineClient +from azure.mgmt.core.policies import AsyncARMAutoResourceProviderRegistrationPolicy +from azure.mgmt.core.tools import get_arm_endpoints + +from .._utils.serialization import Deserializer, Serializer +from ._configuration import FileSharesMgmtClientConfiguration +from .operations import ( + FileShareSnapshotsOperations, + FileSharesOperations, + InformationalOperationsOperations, + Operations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, +) + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials_async import AsyncTokenCredential + + +class FileSharesMgmtClient: + """Azure File Shares Resource Provider API. 
+ + :ivar file_shares: FileSharesOperations operations + :vartype file_shares: azure.mgmt.fileshares.aio.operations.FileSharesOperations + :ivar file_share_snapshots: FileShareSnapshotsOperations operations + :vartype file_share_snapshots: + azure.mgmt.fileshares.aio.operations.FileShareSnapshotsOperations + :ivar operations: Operations operations + :vartype operations: azure.mgmt.fileshares.aio.operations.Operations + :ivar informational_operations: InformationalOperationsOperations operations + :vartype informational_operations: + azure.mgmt.fileshares.aio.operations.InformationalOperationsOperations + :ivar private_endpoint_connections: PrivateEndpointConnectionsOperations operations + :vartype private_endpoint_connections: + azure.mgmt.fileshares.aio.operations.PrivateEndpointConnectionsOperations + :ivar private_link_resources: PrivateLinkResourcesOperations operations + :vartype private_link_resources: + azure.mgmt.fileshares.aio.operations.PrivateLinkResourcesOperations + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is None. + :type base_url: str + :keyword cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :paramtype cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are "2026-06-01". + Default value is "2026-06-01". Note that overriding this default value may result in + unsupported behavior. + :paramtype api_version: str + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: Optional[str] = None, + *, + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + _endpoint = "{endpoint}" + _cloud = cloud_setting or settings.current.azure_cloud # type: ignore + _endpoints = get_arm_endpoints(_cloud) + if not base_url: + base_url = _endpoints["resource_manager"] + credential_scopes = kwargs.pop("credential_scopes", _endpoints["credential_scopes"]) + self._config = FileSharesMgmtClientConfiguration( + credential=credential, + subscription_id=subscription_id, + base_url=cast(str, base_url), + cloud_setting=cloud_setting, + credential_scopes=credential_scopes, + **kwargs + ) + + _policies = kwargs.pop("policies", None) + if _policies is None: + _policies = [ + policies.RequestIdPolicy(**kwargs), + self._config.headers_policy, + self._config.user_agent_policy, + self._config.proxy_policy, + policies.ContentDecodePolicy(**kwargs), + AsyncARMAutoResourceProviderRegistrationPolicy(), + self._config.redirect_policy, + self._config.retry_policy, + self._config.authentication_policy, + self._config.custom_hook_policy, + self._config.logging_policy, + policies.DistributedTracingPolicy(**kwargs), + policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, + self._config.http_logging_policy, + ] + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient( + base_url=cast(str, _endpoint), policies=_policies, **kwargs + ) + + self._serialize = Serializer() + self._deserialize = Deserializer() + self._serialize.client_side_validation = False + self.file_shares = FileSharesOperations(self._client, self._config, self._serialize, self._deserialize) + self.file_share_snapshots = FileShareSnapshotsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) + self.informational_operations 
= InformationalOperationsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + + def send_request( + self, request: HttpRequest, *, stream: bool = False, **kwargs: Any + ) -> Awaitable[AsyncHttpResponse]: + """Runs the network request through the client's chained policies. + + >>> from azure.core.rest import HttpRequest + >>> request = HttpRequest("GET", "https://www.example.org/") + + >>> response = await client.send_request(request) + + + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request + + :param request: The network request you want to make. Required. + :type request: ~azure.core.rest.HttpRequest + :keyword bool stream: Whether the response payload will be streamed. Defaults to False. + :return: The response of your network call. Does not do error handling on your response. 
+ :rtype: ~azure.core.rest.AsyncHttpResponse + """ + + request_copy = deepcopy(request) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + request_copy.url = self._client.format_url(request_copy.url, **path_format_arguments) + return self._client.send_request(request_copy, stream=stream, **kwargs) # type: ignore + + async def close(self) -> None: + await self._client.close() + + async def __aenter__(self) -> Self: + await self._client.__aenter__() + return self + + async def __aexit__(self, *exc_details: Any) -> None: + await self._client.__aexit__(*exc_details) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_configuration.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_configuration.py new file mode 100644 index 000000000000..e772e8fd29bd --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_configuration.py @@ -0,0 +1,80 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from typing import Any, Optional, TYPE_CHECKING + +from azure.core.pipeline import policies +from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy + +from .._version import VERSION + +if TYPE_CHECKING: + from azure.core import AzureClouds + from azure.core.credentials_async import AsyncTokenCredential + + +class FileSharesMgmtClientConfiguration: # pylint: disable=too-many-instance-attributes + """Configuration for FileSharesMgmtClient. 
+ + Note that all parameters used to create this instance are saved as instance + attributes. + + :param credential: Credential used to authenticate requests to the service. Required. + :type credential: ~azure.core.credentials_async.AsyncTokenCredential + :param subscription_id: The ID of the target subscription. The value must be an UUID. Required. + :type subscription_id: str + :param base_url: Service host. Default value is "https://management.azure.com". + :type base_url: str + :param cloud_setting: The cloud setting for which to get the ARM endpoint. Default value is + None. + :type cloud_setting: ~azure.core.AzureClouds + :keyword api_version: The API version to use for this operation. Known values are "2026-06-01". + Default value is "2026-06-01". Note that overriding this default value may result in + unsupported behavior. + :paramtype api_version: str + """ + + def __init__( + self, + credential: "AsyncTokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + cloud_setting: Optional["AzureClouds"] = None, + **kwargs: Any + ) -> None: + api_version: str = kwargs.pop("api_version", "2026-06-01") + + if credential is None: + raise ValueError("Parameter 'credential' must not be None.") + if subscription_id is None: + raise ValueError("Parameter 'subscription_id' must not be None.") + + self.credential = credential + self.subscription_id = subscription_id + self.base_url = base_url + self.cloud_setting = cloud_setting + self.api_version = api_version + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-fileshares/{}".format(VERSION)) + self.polling_interval = kwargs.get("polling_interval", 30) + self._configure(**kwargs) + + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or 
policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") + if self.credential and not self.authentication_policy: + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_patch.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_patch.py new file mode 100644 index 000000000000..ea765788358a --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/_patch.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. 
+ + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/__init__.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/__init__.py new file mode 100644 index 000000000000..07fba52bd74b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/__init__.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import FileSharesOperations # type: ignore +from ._operations import FileShareSnapshotsOperations # type: ignore +from ._operations import Operations # type: ignore +from ._operations import InformationalOperationsOperations # type: ignore +from ._operations import PrivateEndpointConnectionsOperations # type: ignore +from ._operations import PrivateLinkResourcesOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "FileSharesOperations", + "FileShareSnapshotsOperations", + "Operations", + "InformationalOperationsOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # 
pyright: ignore +_patch_sdk() diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_operations.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_operations.py new file mode 100644 index 000000000000..00f96e403158 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_operations.py @@ -0,0 +1,3210 @@ +# pylint: disable=too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, AsyncIterator, Callable, IO, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import AsyncPipelineClient +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.pipeline import PipelineResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod +from azure.core.rest import AsyncHttpResponse, HttpRequest +from azure.core.tracing.decorator import distributed_trace +from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... 
import models as _models +from ..._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from ..._utils.serialization import Deserializer, Serializer +from ..._validation import api_version_validation +from ...operations._operations import ( + build_file_share_snapshots_create_or_update_file_share_snapshot_request, + build_file_share_snapshots_delete_file_share_snapshot_request, + build_file_share_snapshots_get_file_share_snapshot_request, + build_file_share_snapshots_list_by_file_share_request, + build_file_share_snapshots_update_file_share_snapshot_request, + build_file_shares_check_name_availability_request, + build_file_shares_create_or_update_request, + build_file_shares_delete_request, + build_file_shares_get_request, + build_file_shares_list_by_parent_request, + build_file_shares_list_by_subscription_request, + build_file_shares_update_request, + build_informational_operations_get_limits_request, + build_informational_operations_get_provisioning_recommendation_request, + build_informational_operations_get_usage_data_request, + build_operations_list_request, + build_private_endpoint_connections_create_request, + build_private_endpoint_connections_delete_request, + build_private_endpoint_connections_get_request, + build_private_endpoint_connections_list_by_file_share_request, + build_private_link_resources_get_request, + build_private_link_resources_list_request, +) +from .._configuration import FileSharesMgmtClientConfiguration + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, dict[str, Any]], Any]] +List = list + + +class FileSharesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.aio.FileSharesMgmtClient`'s + :attr:`file_shares` attribute. 
+    """
+
+    def __init__(self, *args, **kwargs) -> None:
+        # Positional wiring from the generated client:
+        # (pipeline client, configuration, serializer, deserializer).
+        input_args = list(args)
+        self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client")
+        self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config")
+        self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer")
+        self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer")
+
+    @distributed_trace_async
+    async def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.FileShare:
+        """Get a FileShare.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param resource_name: The resource name of the file share, as seen by the administrator through
+         Azure Resource Manager. Required.
+        :type resource_name: str
+        :return: FileShare. The FileShare is compatible with MutableMapping
+        :rtype: ~azure.mgmt.fileshares.models.FileShare
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        # Map well-known status codes to azure-core exception types; callers may
+        # extend/override this via the ``error_map`` keyword.
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        # Optional callback to customize the deserialized response.
+        cls: ClsType[_models.FileShare] = kwargs.pop("cls", None)
+
+        _request = build_file_shares_get_request(
+            resource_group_name=resource_group_name,
+            resource_name=resource_name,
+            subscription_id=self._config.subscription_id,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _decompress = kwargs.pop("decompress", True)
+        _stream = kwargs.pop("stream", False)
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            if _stream:
+                try:
+                    await response.read()  # Load the body in memory and close the socket
+                except (StreamConsumedError, StreamClosedError):
+                    pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(
+                _models.ErrorResponse,
+                response,
+            )
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        # Streaming callers get raw byte iterators; otherwise deserialize the JSON body.
+        if _stream:
+            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+        else:
+            deserialized = _deserialize(_models.FileShare, response.json())
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})  # type: ignore
+
+        return deserialized  # type: ignore
+
+    # Initial (non-polling) call of the create-or-update LRO; returns the raw
+    # streamed response body for the poller to consume.
+    async def _create_or_update_initial(
+        self,
+        resource_group_name: str,
+        resource_name: str,
+        resource: Union[_models.FileShare, JSON, IO[bytes]],
+        **kwargs: Any
+    ) -> AsyncIterator[bytes]:
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
+        _params = kwargs.pop("params", {}) or {}
+
+        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
+        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+        content_type = content_type or "application/json"
+        _content = None
+        # Binary/file-like payloads pass through untouched; models and plain
+        # JSON mappings are serialized with the SDK encoder.
+        if isinstance(resource, (IOBase, bytes)):
+            _content = resource
+        else:
+            _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore
+
+        _request = build_file_shares_create_or_update_request(
+            resource_group_name=resource_group_name,
+
resource_name=resource_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: _models.FileShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.fileshares.models.FileShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns FileShare. 
The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: Union[_models.FileShare, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Is one of the following types: FileShare, JSON, + IO[bytes] Required. 
+ :type resource: ~azure.mgmt.fileshares.models.FileShare or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_or_update_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.FileShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.FileShare].from_continuation_token( + 
polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[_models.FileShare]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + async def _update_initial( + self, + resource_group_name: str, + resource_name: str, + properties: Union[_models.FileShareUpdate, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_update_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if 
response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: _models.FileShareUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.fileshares.models.FileShareUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns FileShare. 
The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: Union[_models.FileShareUpdate, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Is one of the following types: + FileShareUpdate, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.fileshares.models.FileShareUpdate or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns FileShare. 
The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._update_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.FileShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.FileShare].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return 
AsyncLROPoller[_models.FileShare](
+            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
+        )
+
+    # Initial (non-polling) call of the delete LRO; returns the raw streamed
+    # response body for the poller to consume.
+    async def _delete_initial(
+        self, resource_group_name: str, resource_name: str, **kwargs: Any
+    ) -> AsyncIterator[bytes]:
+        error_map: MutableMapping = {
+            401: ClientAuthenticationError,
+            404: ResourceNotFoundError,
+            409: ResourceExistsError,
+            304: ResourceNotModifiedError,
+        }
+        error_map.update(kwargs.pop("error_map", {}) or {})
+
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)
+
+        _request = build_file_shares_delete_request(
+            resource_group_name=resource_group_name,
+            resource_name=resource_name,
+            subscription_id=self._config.subscription_id,
+            api_version=self._config.api_version,
+            headers=_headers,
+            params=_params,
+        )
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+        }
+        _request.url = self._client.format_url(_request.url, **path_format_arguments)
+
+        _decompress = kwargs.pop("decompress", True)
+        # Always stream: the poller reads the body itself.
+        _stream = True
+        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
+            _request, stream=_stream, **kwargs
+        )
+
+        response = pipeline_response.http_response
+
+        if response.status_code not in [202, 204]:
+            try:
+                await response.read()  # Load the body in memory and close the socket
+            except (StreamConsumedError, StreamClosedError):
+                pass
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            error = _failsafe_deserialize(
+                _models.ErrorResponse,
+                response,
+            )
+            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
+
+        response_headers = {}
+        # 202 Accepted carries the polling URL (Location) and retry hint.
+        if response.status_code == 202:
+            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
+            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))
+
+        deserialized = response.iter_bytes() if _decompress else response.iter_raw()
+
+        if cls:
+            return cls(pipeline_response, deserialized, response_headers)  # type: ignore
+
+        return deserialized  # type: ignore
+
+    @distributed_trace_async
+    async def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
+        """Delete a FileShare.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param resource_name: The resource name of the file share, as seen by the administrator through
+         Azure Resource Manager. Required.
+        :type resource_name: str
+        :return: An instance of AsyncLROPoller that returns None
+        :rtype: ~azure.core.polling.AsyncLROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        _headers = kwargs.pop("headers", {}) or {}
+        _params = kwargs.pop("params", {}) or {}
+
+        cls: ClsType[None] = kwargs.pop("cls", None)
+        # polling: True -> ARM polling; False -> no polling; else a custom method.
+        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
+        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
+        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
+        if cont_token is None:
+            # Fire the initial request; ``cls`` identity-passes the raw pipeline
+            # response so the poller can inspect status/headers itself.
+            raw_result = await self._delete_initial(
+                resource_group_name=resource_group_name,
+                resource_name=resource_name,
+                cls=lambda x, y, z: x,
+                headers=_headers,
+                params=_params,
+                **kwargs
+            )
+            await raw_result.http_response.read()  # type: ignore
+        kwargs.pop("error_map", None)
+
+        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
+            if cls:
+                return cls(pipeline_response, None, {})  # type: ignore
+
+        path_format_arguments = {
+            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
+        }
+
+        if polling is True:
+            polling_method: AsyncPollingMethod = cast(
+                AsyncPollingMethod, AsyncARMPolling(lro_delay, 
path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> AsyncItemPaged["_models.FileShare"]: + """List FileShare resources by subscription ID. + + :return: An iterator like instance of FileShare + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.FileShare]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_shares_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in 
    @distributed_trace
    def list_by_parent(self, resource_group_name: str, **kwargs: Any) -> AsyncItemPaged["_models.FileShare"]:
        """List FileShare resources by resource group.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :return: An iterator like instance of FileShare
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.fileshares.models.FileShare]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        # Optional caller-supplied hook that post-processes each deserialized page of items.
        cls: ClsType[List[_models.FileShare]] = kwargs.pop("cls", None)

        # Status codes mapped to specific azure-core exception types; callers may extend via
        # the ``error_map`` keyword.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the operation's parameters.
            if not next_link:

                _request = build_file_shares_list_by_parent_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    api_version=self._config.api_version,
                    headers=_headers,
                    params=_params,
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            else:
                # make call to next link with the client's api-version
                # (the service-provided nextLink may carry a different api-version,
                # so its query string is re-parsed and the client's version pinned).
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                _request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                path_format_arguments = {
                    "endpoint": self._serialize.url(
                        "self._config.base_url", self._config.base_url, "str", skip_quote=True
                    ),
                }
                _request.url = self._client.format_url(_request.url, **path_format_arguments)

            return _request

        async def extract_data(pipeline_response):
            # Split one response body into (continuation link, iterable of page items).
            deserialized = pipeline_response.http_response.json()
            list_of_elem = _deserialize(
                List[_models.FileShare],
                deserialized.get("value", []),
            )
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.get("nextLink") or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page; only HTTP 200 is a success for this operation.
            _request = prepare_request(next_link)

            _stream = False
            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                _request, stream=_stream, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = _failsafe_deserialize(
                    _models.ErrorResponse,
                    response,
                )
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

            return pipeline_response

        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
        return AsyncItemPaged(get_next, extract_data)
    @distributed_trace_async
    async def check_name_availability(
        self, location: str, body: Union[_models.CheckNameAvailabilityRequest, JSON, IO[bytes]], **kwargs: Any
    ) -> _models.CheckNameAvailabilityResponse:
        """Implements local CheckNameAvailability operations.

        :param location: The name of the Azure region. Required.
        :type location: str
        :param body: The CheckAvailability request. Is one of the following types:
         CheckNameAvailabilityRequest, JSON, IO[bytes] Required.
        :type body: ~azure.mgmt.fileshares.models.CheckNameAvailabilityRequest or JSON or IO[bytes]
        :return: CheckNameAvailabilityResponse. The CheckNameAvailabilityResponse is compatible with
         MutableMapping
        :rtype: ~azure.mgmt.fileshares.models.CheckNameAvailabilityResponse
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Status codes mapped to specific azure-core exception types; extensible via ``error_map``.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.CheckNameAvailabilityResponse] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Raw byte/stream bodies are sent as-is; model/JSON bodies are serialized here.
        if isinstance(body, (IOBase, bytes)):
            _content = body
        else:
            _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_file_shares_check_name_availability_request(
            location=location,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        _stream = kwargs.pop("stream", False)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            if _stream:
                try:
                    await response.read()  # Load the body in memory and close the socket
                except (StreamConsumedError, StreamClosedError):
                    pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Streaming callers get the raw byte iterator; otherwise deserialize into the model.
        if _stream:
            deserialized = response.iter_bytes() if _decompress else response.iter_raw()
        else:
            deserialized = _deserialize(_models.CheckNameAvailabilityResponse, response.json())

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get_file_share_snapshot( + self, resource_group_name: str, resource_name: str, name: str, **kwargs: Any + ) -> _models.FileShareSnapshot: + """Get a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :return: FileShareSnapshot. 
The FileShareSnapshot is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareSnapshot + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShareSnapshot] = kwargs.pop("cls", None) + + _request = build_file_share_snapshots_get_file_share_snapshot_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareSnapshot, response.json()) + + if cls: + return 
cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + async def _create_or_update_file_share_snapshot_initial( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: Union[_models.FileShareSnapshot, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_share_snapshots_create_or_update_file_share_snapshot_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + await 
response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create_or_update_file_share_snapshot( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: _models.FileShareSnapshot, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FileShareSnapshot]: + """Create a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.fileshares.models.FileShareSnapshot + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
    @distributed_trace_async
    async def begin_create_or_update_file_share_snapshot(  # pylint: disable=name-too-long
        self,
        resource_group_name: str,
        resource_name: str,
        name: str,
        resource: Union[_models.FileShareSnapshot, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.FileShareSnapshot]:
        """Create a FileShareSnapshot.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The resource name of the file share, as seen by the administrator through
         Azure Resource Manager. Required.
        :type resource_name: str
        :param name: The name of the FileShareSnapshot. Required.
        :type name: str
        :param resource: Resource create parameters. Is one of the following types: FileShareSnapshot,
         JSON, IO[bytes] Required.
        :type resource: ~azure.mgmt.fileshares.models.FileShareSnapshot or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns FileShareSnapshot. The FileShareSnapshot is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.FileShareSnapshot] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # No continuation token: issue the initial request now; otherwise the poller is
        # rehydrated from the token below and no request is sent here.
        if cont_token is None:
            raw_result = await self._create_or_update_file_share_snapshot_initial(
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                name=name,
                resource=resource,
                content_type=content_type,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through to the poller
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal polling response into the final model.
            response_headers = {}
            response = pipeline_response.http_response
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

            deserialized = _deserialize(_models.FileShareSnapshot, response.json())
            if cls:
                return cls(pipeline_response, deserialized, response_headers)  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        # polling=True -> standard ARM polling; False -> no polling; else a custom method.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[_models.FileShareSnapshot].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.FileShareSnapshot](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )
    async def _update_file_share_snapshot_initial(
        self,
        resource_group_name: str,
        resource_name: str,
        name: str,
        properties: Union[_models.FileShareSnapshotUpdate, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Send the initial (non-polled) update request for the LRO.

        Returns the streamed raw response body; the ``begin_*`` wrapper owns polling
        and final deserialization. HTTP 200 (sync completion) and 202 (async) are accepted.
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        _content = None
        # Raw byte/stream bodies pass through untouched; models/JSON are serialized here.
        if isinstance(properties, (IOBase, bytes)):
            _content = properties
        else:
            _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True)  # type: ignore

        _request = build_file_share_snapshots_update_file_share_snapshot_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            name=name,
            subscription_id=self._config.subscription_id,
            content_type=content_type,
            api_version=self._config.api_version,
            content=_content,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Always stream: the poller reads the body itself.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # LRO bookkeeping headers are only present on the asynchronous (202) path.
        response_headers = {}
        if response.status_code == 202:
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
    @distributed_trace_async
    async def begin_update_file_share_snapshot(
        self,
        resource_group_name: str,
        resource_name: str,
        name: str,
        properties: Union[_models.FileShareSnapshotUpdate, JSON, IO[bytes]],
        **kwargs: Any
    ) -> AsyncLROPoller[_models.FileShareSnapshot]:
        """Update a FileShareSnapshot.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The resource name of the file share, as seen by the administrator through
         Azure Resource Manager. Required.
        :type resource_name: str
        :param name: The name of the FileShareSnapshot. Required.
        :type name: str
        :param properties: The resource properties to be updated. Is one of the following types:
         FileShareSnapshotUpdate, JSON, IO[bytes] Required.
        :type properties: ~azure.mgmt.fileshares.models.FileShareSnapshotUpdate or JSON or IO[bytes]
        :return: An instance of AsyncLROPoller that returns FileShareSnapshot. The FileShareSnapshot is
         compatible with MutableMapping
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = kwargs.pop("params", {}) or {}

        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.FileShareSnapshot] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # No continuation token: issue the initial request now; otherwise the poller is
        # rehydrated from the token below and no request is sent here.
        if cont_token is None:
            raw_result = await self._update_file_share_snapshot_initial(
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                name=name,
                properties=properties,
                content_type=content_type,
                cls=lambda x, y, z: x,  # pass the raw pipeline response through to the poller
                headers=_headers,
                params=_params,
                **kwargs
            )
            await raw_result.http_response.read()  # type: ignore
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal polling response into the final model.
            response = pipeline_response.http_response
            deserialized = _deserialize(_models.FileShareSnapshot, response.json())
            if cls:
                return cls(pipeline_response, deserialized, {})  # type: ignore
            return deserialized

        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }

        # polling=True -> standard ARM polling; False -> no polling; else a custom method.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(
                AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
            )
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller[_models.FileShareSnapshot].from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller[_models.FileShareSnapshot](
            self._client, raw_result, get_long_running_output, polling_method  # type: ignore
        )
    async def _delete_file_share_snapshot_initial(
        self, resource_group_name: str, resource_name: str, name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Send the initial (non-polled) delete request for the LRO.

        Returns the streamed raw response body; the ``begin_*`` wrapper owns polling.
        HTTP 202 (accepted) and 204 (already gone) are treated as success.
        """
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_file_share_snapshots_delete_file_share_snapshot_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            name=name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Always stream: the poller reads the body itself.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [202, 204]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # Polling headers are only present on the asynchronous (202) path.
        response_headers = {}
        if response.status_code == 202:
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
+ :type name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_file_share_snapshot_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_file_share( + self, resource_group_name: str, resource_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.FileShareSnapshot"]: + """List 
FileShareSnapshot by FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An iterator like instance of FileShareSnapshot + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.FileShareSnapshot]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_share_snapshots_list_by_file_share_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + 
path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileShareSnapshot], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.aio.FileSharesMgmtClient`'s + :attr:`operations` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> AsyncItemPaged["_models.Operation"]: + """List the operations for the provider. + + :return: An iterator like instance of Operation + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.fileshares.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = 
HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class InformationalOperationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.aio.FileSharesMgmtClient`'s + :attr:`informational_operations` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + async def get_usage_data(self, location: str, **kwargs: Any) -> _models.FileShareUsageDataResponse: + """Get file shares usage data. + + :param location: The name of the Azure region. Required. + :type location: str + :return: FileShareUsageDataResponse. The FileShareUsageDataResponse is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareUsageDataResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShareUsageDataResponse] = kwargs.pop("cls", None) + + _request = build_informational_operations_get_usage_data_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, 
stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareUsageDataResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace_async + async def get_limits(self, location: str, **kwargs: Any) -> _models.FileShareLimitsResponse: + """Get file shares limits. + + :param location: The name of the Azure region. Required. + :type location: str + :return: FileShareLimitsResponse. 
The FileShareLimitsResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareLimitsResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShareLimitsResponse] = kwargs.pop("cls", None) + + _request = build_informational_operations_get_limits_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareLimitsResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return 
deserialized # type: ignore + + @overload + async def get_provisioning_recommendation( + self, + location: str, + body: _models.FileShareProvisioningRecommendationRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Required. + :type body: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShareProvisioningRecommendationResponse. The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_provisioning_recommendation( + self, location: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShareProvisioningRecommendationResponse. 
The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_provisioning_recommendation( + self, location: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShareProvisioningRecommendationResponse. The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_provisioning_recommendation( + self, + location: str, + body: Union[_models.FileShareProvisioningRecommendationRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Is one of the following types: + FileShareProvisioningRecommendationRequest, JSON, IO[bytes] Required. + :type body: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationRequest or JSON or + IO[bytes] + :return: FileShareProvisioningRecommendationResponse. 
The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShareProvisioningRecommendationResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_informational_operations_get_provisioning_recommendation_request( + location=location, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + 
map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareProvisioningRecommendationResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.aio.FileSharesMgmtClient`'s + :attr:`private_endpoint_connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + async def get( + self, resource_group_name: str, resource_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the 
socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + async def _create_initial( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncIterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_private_endpoint_connections_create_request( + 
resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + async def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: 
_models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Required. + :type resource: ~azure.mgmt.fileshares.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. 
+ :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. 
The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + async def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> AsyncLROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Is one of the following types: + PrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.fileshares.models.PrivateEndpointConnection or JSON or IO[bytes] + :return: An instance of AsyncLROPoller that returns PrivateEndpointConnection. 
The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._create_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.PrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[_models.PrivateEndpointConnection].from_continuation_token( + 
    @api_version_validation(
        method_added_on="2025-09-01-preview",
        params_added_on={
            "2025-09-01-preview": [
                "api_version",
                "subscription_id",
                "resource_group_name",
                "resource_name",
                "private_endpoint_connection_name",
            ]
        },
        api_versions_list=["2025-09-01-preview", "2026-06-01"],
    )
    async def _delete_initial(
        self, resource_group_name: str, resource_name: str, private_endpoint_connection_name: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Issue the initial DELETE request of the delete long-running operation.

        Returns the raw (streamed) response body; ``begin_delete`` wraps this
        result in an ``AsyncLROPoller``.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param resource_name: The resource name of the file share.
        :type resource_name: str
        :param private_endpoint_connection_name: The name of the private endpoint connection.
        :type private_endpoint_connection_name: str
        :return: An async iterator over the raw response bytes.
        :rtype: AsyncIterator[bytes]
        :raises ~azure.core.exceptions.HttpResponseError: If the service returns a
            status code other than 200, 202 or 204.
        """
        # Map well-known error status codes to azure-core exception types;
        # callers may extend or override the mapping via the ``error_map`` keyword.
        error_map: MutableMapping = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = kwargs.pop("params", {}) or {}

        cls: ClsType[AsyncIterator[bytes]] = kwargs.pop("cls", None)

        _request = build_private_endpoint_connections_delete_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            private_endpoint_connection_name=private_endpoint_connection_name,
            subscription_id=self._config.subscription_id,
            api_version=self._config.api_version,
            headers=_headers,
            params=_params,
        )
        # Substitute the client endpoint into the request URL template.
        path_format_arguments = {
            "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True),
        }
        _request.url = self._client.format_url(_request.url, **path_format_arguments)

        _decompress = kwargs.pop("decompress", True)
        # Always stream: the body is handed back as an iterator rather than read eagerly.
        _stream = True
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            _request, stream=_stream, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            try:
                await response.read()  # Load the body in memory and close the socket
            except (StreamConsumedError, StreamClosedError):
                pass
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # Best-effort deserialization of the service error payload.
            error = _failsafe_deserialize(
                _models.ErrorResponse,
                response,
            )
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        response_headers = {}
        if response.status_code == 202:
            # 202 Accepted: surface the async-operation headers — presumably
            # consumed by the ARM polling strategy; confirm against AsyncARMPolling.
            response_headers["Azure-AsyncOperation"] = self._deserialize(
                "str", response.headers.get("Azure-AsyncOperation")
            )
            response_headers["Location"] = self._deserialize("str", response.headers.get("Location"))
            response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After"))

        deserialized = response.iter_bytes() if _decompress else response.iter_raw()

        if cls:
            return cls(pipeline_response, deserialized, response_headers)  # type: ignore

        return deserialized  # type: ignore
+ :type private_endpoint_connection_name: str + :return: An instance of AsyncLROPoller that returns None + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._delete_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + await raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + 
"2025-09-01-preview": ["api_version", "subscription_id", "resource_group_name", "resource_name", "accept"] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def list_by_file_share( + self, resource_group_name: str, resource_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.PrivateEndpointConnection"]: + """Get a PrivateEndpointConnection List. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An iterator like instance of PrivateEndpointConnection + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_private_endpoint_connections_list_by_file_share_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + 
_next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.PrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) + + +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.aio.FileSharesMgmtClient`'s + :attr:`private_link_resources` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace_async + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_link_resource_name", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + async def get( + self, resource_group_name: str, resource_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.PrivateLinkResource: + """Gets the private link resources that need to be created for a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_link_resource_name: The name of the private link resource. Required. + :type private_link_resource_name: str + :return: PrivateLinkResource. 
The PrivateLinkResource is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.PrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_private_link_resources_get_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + await response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateLinkResource, response.json()) + + 
if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": ["api_version", "subscription_id", "resource_group_name", "resource_name", "accept"] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def list( + self, resource_group_name: str, resource_name: str, **kwargs: Any + ) -> AsyncItemPaged["_models.PrivateLinkResource"]: + """Gets the private link resources that need to be created for a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An iterator like instance of PrivateLinkResource + :rtype: + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.fileshares.models.PrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.PrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_private_link_resources_list_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + async def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.PrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged(get_next, extract_data) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_patch.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/aio/operations/_patch.py new file mode 
def patch_sdk():
    """Hook for hand-written customizations of the generated package.

    Keep this function in place even when empty: the code generator's
    customization workflow relies on it as a last-resort escape hatch for
    changes that cannot be expressed via the techniques described at
    https://aka.ms/azsdk/python/dpcodegen/python/customize
    """
    # Intentionally a no-op: no customizations are applied at this time.
+# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + + +from ._models import ( # type: ignore + CheckNameAvailabilityRequest, + CheckNameAvailabilityResponse, + ErrorAdditionalInfo, + ErrorDetail, + ErrorResponse, + FileShare, + FileShareLimits, + FileShareLimitsOutput, + FileShareLimitsResponse, + FileShareProperties, + FileShareProvisioningConstants, + FileShareProvisioningRecommendationInput, + FileShareProvisioningRecommendationOutput, + FileShareProvisioningRecommendationRequest, + FileShareProvisioningRecommendationResponse, + FileShareSnapshot, + FileShareSnapshotProperties, + FileShareSnapshotUpdate, + FileShareSnapshotUpdateProperties, + FileShareUpdate, + FileShareUpdateProperties, + FileShareUsageDataOutput, + FileShareUsageDataResponse, + LiveSharesUsageData, + NfsProtocolProperties, + Operation, + OperationDisplay, + PrivateEndpoint, + PrivateEndpointConnection, + PrivateEndpointConnectionProperties, + PrivateLinkResource, + PrivateLinkResourceProperties, + PrivateLinkServiceConnectionState, + ProxyResource, + PublicAccessProperties, + Resource, + SystemData, + TrackedResource, +) + +from ._enums import ( # type: ignore + ActionType, + CheckNameAvailabilityReason, + CreatedByType, + EncryptionInTransitRequired, + FileShareProvisioningState, + MediaTier, + Origin, + PrivateEndpointConnectionProvisioningState, + PrivateEndpointServiceConnectionStatus, + Protocol, + PublicNetworkAccess, + Redundancy, + ShareRootSquash, +) +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "CheckNameAvailabilityRequest", + "CheckNameAvailabilityResponse", + "ErrorAdditionalInfo", + "ErrorDetail", + "ErrorResponse", + "FileShare", + "FileShareLimits", + "FileShareLimitsOutput", + "FileShareLimitsResponse", + 
"FileShareProperties", + "FileShareProvisioningConstants", + "FileShareProvisioningRecommendationInput", + "FileShareProvisioningRecommendationOutput", + "FileShareProvisioningRecommendationRequest", + "FileShareProvisioningRecommendationResponse", + "FileShareSnapshot", + "FileShareSnapshotProperties", + "FileShareSnapshotUpdate", + "FileShareSnapshotUpdateProperties", + "FileShareUpdate", + "FileShareUpdateProperties", + "FileShareUsageDataOutput", + "FileShareUsageDataResponse", + "LiveSharesUsageData", + "NfsProtocolProperties", + "Operation", + "OperationDisplay", + "PrivateEndpoint", + "PrivateEndpointConnection", + "PrivateEndpointConnectionProperties", + "PrivateLinkResource", + "PrivateLinkResourceProperties", + "PrivateLinkServiceConnectionState", + "ProxyResource", + "PublicAccessProperties", + "Resource", + "SystemData", + "TrackedResource", + "ActionType", + "CheckNameAvailabilityReason", + "CreatedByType", + "EncryptionInTransitRequired", + "FileShareProvisioningState", + "MediaTier", + "Origin", + "PrivateEndpointConnectionProvisioningState", + "PrivateEndpointServiceConnectionStatus", + "Protocol", + "PublicNetworkAccess", + "Redundancy", + "ShareRootSquash", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_enums.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_enums.py new file mode 100644 index 000000000000..cb6e75e2640f --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_enums.py @@ -0,0 +1,161 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
# NOTE(review): every enum below subclasses ``str`` (so members compare equal
# to their wire-format string values) and uses azure-core's
# CaseInsensitiveEnumMeta — presumably making value lookup case-insensitive;
# confirm against azure.core documentation.


class ActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Extensible enum. Indicates the action type. "Internal" refers to actions that are for internal
    only APIs.
    """

    INTERNAL = "Internal"
    """Actions are for internal-only APIs."""


class CheckNameAvailabilityReason(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """Possible reasons for a name not being available."""

    INVALID = "Invalid"
    """Name is invalid."""
    ALREADY_EXISTS = "AlreadyExists"
    """Name already exists."""


class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """The kind of entity that created the resource."""

    USER = "User"
    """The entity was created by a user."""
    APPLICATION = "Application"
    """The entity was created by an application."""
    MANAGED_IDENTITY = "ManagedIdentity"
    """The entity was created by a managed identity."""
    KEY = "Key"
    """The entity was created by a key."""


class EncryptionInTransitRequired(str, Enum, metaclass=CaseInsensitiveEnumMeta):
    """State of NFS encryption in transit."""

    ENABLED = "Enabled"
    """Encryption in Transit is enabled."""
    DISABLED = "Disabled"
    """Encryption in Transit is disabled."""
CREATED = "Created" + """The resource has been created.""" + TRANSIENT_FAILURE = "TransientFailure" + """The operation is in a transient failure state.""" + CREATING = "Creating" + """The resource is being created.""" + PATCHING = "Patching" + """The resource is being patched.""" + POSTING = "Posting" + """The resource is being posted.""" + + +class MediaTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Media Tier enum.""" + + SSD = "SSD" + """SSD media tier.""" + + +class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is "user,system". + """ + + USER = "user" + """Indicates the operation is initiated by a user.""" + SYSTEM = "system" + """Indicates the operation is initiated by a system.""" + USER_SYSTEM = "user,system" + """Indicates the operation is initiated by a user or system.""" + + +class PrivateEndpointConnectionProvisioningState( # pylint: disable=name-too-long + str, Enum, metaclass=CaseInsensitiveEnumMeta +): + """The current provisioning state.""" + + SUCCEEDED = "Succeeded" + """Connection has been provisioned.""" + CREATING = "Creating" + """Connection is being created.""" + DELETING = "Deleting" + """Connection is being deleted.""" + FAILED = "Failed" + """Connection provisioning has failed.""" + + +class PrivateEndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The private endpoint connection status.""" + + PENDING = "Pending" + """Connection waiting for approval or rejection.""" + APPROVED = "Approved" + """Connection approved.""" + REJECTED = "Rejected" + """Connection Rejected.""" + + +class Protocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Protocol enum.""" + + NFS = "NFS" + """NFS protocol.""" + + +class PublicNetworkAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the public network access.""" + + ENABLED = "Enabled" + """The public network 
access is enabled.""" + DISABLED = "Disabled" + """The public network access is disabled.""" + + +class Redundancy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Redundancy enum.""" + + LOCAL = "Local" + """Local redundancy.""" + ZONE = "Zone" + """Zone redundancy.""" + + +class ShareRootSquash(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Share root squash enum.""" + + NO_ROOT_SQUASH = "NoRootSquash" + """No root squash.""" + ROOT_SQUASH = "RootSquash" + """Root squash.""" + ALL_SQUASH = "AllSquash" + """All squash.""" diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_models.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_models.py new file mode 100644 index 000000000000..e9cc10249be3 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_models.py @@ -0,0 +1,1585 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=useless-super-delegation + +import datetime +from typing import Any, Mapping, Optional, TYPE_CHECKING, Union, overload + +from .._utils.model_base import Model as _Model, rest_field + +if TYPE_CHECKING: + from .. import models as _models + + +class CheckNameAvailabilityRequest(_Model): + """The check availability request body. + + :ivar name: The name of the resource for which availability needs to be checked. + :vartype name: str + :ivar type: The resource type. 
+ :vartype type: str + """ + + name: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The name of the resource for which availability needs to be checked.""" + type: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The resource type.""" + + @overload + def __init__( + self, + *, + name: Optional[str] = None, + type: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class CheckNameAvailabilityResponse(_Model): + """The check availability result. + + :ivar name_available: Indicates if the resource name is available. + :vartype name_available: bool + :ivar reason: The reason why the given name is not available. Known values are: "Invalid" and + "AlreadyExists". + :vartype reason: str or ~azure.mgmt.fileshares.models.CheckNameAvailabilityReason + :ivar message: Detailed reason why the given name is not available. + :vartype message: str + """ + + name_available: Optional[bool] = rest_field( + name="nameAvailable", visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates if the resource name is available.""" + reason: Optional[Union[str, "_models.CheckNameAvailabilityReason"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The reason why the given name is not available. 
Known values are: \"Invalid\" and + \"AlreadyExists\".""" + message: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Detailed reason why the given name is not available.""" + + @overload + def __init__( + self, + *, + name_available: Optional[bool] = None, + reason: Optional[Union[str, "_models.CheckNameAvailabilityReason"]] = None, + message: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ErrorAdditionalInfo(_Model): + """The resource management error additional info. + + :ivar type: The additional info type. + :vartype type: str + :ivar info: The additional info. + :vartype info: any + """ + + type: Optional[str] = rest_field(visibility=["read"]) + """The additional info type.""" + info: Optional[Any] = rest_field(visibility=["read"]) + """The additional info.""" + + +class ErrorDetail(_Model): + """The error detail. + + :ivar code: The error code. + :vartype code: str + :ivar message: The error message. + :vartype message: str + :ivar target: The error target. + :vartype target: str + :ivar details: The error details. + :vartype details: list[~azure.mgmt.fileshares.models.ErrorDetail] + :ivar additional_info: The error additional info. 
+ :vartype additional_info: list[~azure.mgmt.fileshares.models.ErrorAdditionalInfo] + """ + + code: Optional[str] = rest_field(visibility=["read"]) + """The error code.""" + message: Optional[str] = rest_field(visibility=["read"]) + """The error message.""" + target: Optional[str] = rest_field(visibility=["read"]) + """The error target.""" + details: Optional[list["_models.ErrorDetail"]] = rest_field(visibility=["read"]) + """The error details.""" + additional_info: Optional[list["_models.ErrorAdditionalInfo"]] = rest_field( + name="additionalInfo", visibility=["read"] + ) + """The error additional info.""" + + +class ErrorResponse(_Model): + """Error response. + + :ivar error: The error object. + :vartype error: ~azure.mgmt.fileshares.models.ErrorDetail + """ + + error: Optional["_models.ErrorDetail"] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The error object.""" + + @overload + def __init__( + self, + *, + error: Optional["_models.ErrorDetail"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Resource(_Model): + """Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. 
+ :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}.""" + name: Optional[str] = rest_field(visibility=["read"]) + """The name of the resource.""" + type: Optional[str] = rest_field(visibility=["read"]) + """The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or + \"Microsoft.Storage/storageAccounts\".""" + system_data: Optional["_models.SystemData"] = rest_field(name="systemData", visibility=["read"]) + """Azure Resource Manager metadata containing createdBy and modifiedBy information.""" + + +class TrackedResource(Resource): + """Tracked Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + """ + + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + location: str = rest_field(visibility=["read", "create"]) + """The geo-location where the resource lives. Required.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShare(TrackedResource): + """File share resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar location: The geo-location where the resource lives. Required. + :vartype location: str + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.fileshares.models.FileShareProperties + """ + + properties: Optional["_models.FileShareProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + location: str, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.FileShareProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareLimits(_Model): + """File share-related limits in the specified subscription/location. 
+ + :ivar max_file_shares: The maximum number of file shares that can be created. Required. + :vartype max_file_shares: int + :ivar max_file_share_snapshots: The maximum number of snapshots allowed per file share. + Required. + :vartype max_file_share_snapshots: int + :ivar max_file_share_subnets: The maximum number of subnets that can be associated with a file + share. Required. + :vartype max_file_share_subnets: int + :ivar max_file_share_private_endpoint_connections: The maximum number of private endpoint + connections allowed for a file share. Required. + :vartype max_file_share_private_endpoint_connections: int + :ivar min_provisioned_storage_gi_b: The minimum provisioned storage in GiB for a file share. + Required. + :vartype min_provisioned_storage_gi_b: int + :ivar max_provisioned_storage_gi_b: The maximum provisioned storage in GiB for a file share. + Required. + :vartype max_provisioned_storage_gi_b: int + :ivar min_provisioned_io_per_sec: The minimum provisioned IOPS (Input/Output Operations Per + Second) for a file share. Required. + :vartype min_provisioned_io_per_sec: int + :ivar max_provisioned_io_per_sec: The maximum provisioned IOPS (Input/Output Operations Per + Second) for a file share. Required. + :vartype max_provisioned_io_per_sec: int + :ivar min_provisioned_throughput_mi_b_per_sec: The minimum provisioned throughput in MiB/s for + a file share. Required. + :vartype min_provisioned_throughput_mi_b_per_sec: int + :ivar max_provisioned_throughput_mi_b_per_sec: The maximum provisioned throughput in MiB/s for + a file share. Required. + :vartype max_provisioned_throughput_mi_b_per_sec: int + """ + + max_file_shares: int = rest_field(name="maxFileShares", visibility=["read", "create", "update", "delete", "query"]) + """The maximum number of file shares that can be created. 
Required.""" + max_file_share_snapshots: int = rest_field( + name="maxFileShareSnapshots", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum number of snapshots allowed per file share. Required.""" + max_file_share_subnets: int = rest_field( + name="maxFileShareSubnets", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum number of subnets that can be associated with a file share. Required.""" + max_file_share_private_endpoint_connections: int = rest_field( + name="maxFileSharePrivateEndpointConnections", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum number of private endpoint connections allowed for a file share. Required.""" + min_provisioned_storage_gi_b: int = rest_field( + name="minProvisionedStorageGiB", visibility=["read", "create", "update", "delete", "query"] + ) + """The minimum provisioned storage in GiB for a file share. Required.""" + max_provisioned_storage_gi_b: int = rest_field( + name="maxProvisionedStorageGiB", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum provisioned storage in GiB for a file share. Required.""" + min_provisioned_io_per_sec: int = rest_field( + name="minProvisionedIOPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """The minimum provisioned IOPS (Input/Output Operations Per Second) for a file share. Required.""" + max_provisioned_io_per_sec: int = rest_field( + name="maxProvisionedIOPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum provisioned IOPS (Input/Output Operations Per Second) for a file share. Required.""" + min_provisioned_throughput_mi_b_per_sec: int = rest_field( + name="minProvisionedThroughputMiBPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """The minimum provisioned throughput in MiB/s for a file share. 
Required.""" + max_provisioned_throughput_mi_b_per_sec: int = rest_field( + name="maxProvisionedThroughputMiBPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """The maximum provisioned throughput in MiB/s for a file share. Required.""" + + @overload + def __init__( + self, + *, + max_file_shares: int, + max_file_share_snapshots: int, + max_file_share_subnets: int, + max_file_share_private_endpoint_connections: int, + min_provisioned_storage_gi_b: int, + max_provisioned_storage_gi_b: int, + min_provisioned_io_per_sec: int, + max_provisioned_io_per_sec: int, + min_provisioned_throughput_mi_b_per_sec: int, + max_provisioned_throughput_mi_b_per_sec: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareLimitsOutput(_Model): + """File share limits API result. + + :ivar limits: The limits for the file share. Required. + :vartype limits: ~azure.mgmt.fileshares.models.FileShareLimits + :ivar provisioning_constants: The provisioning constants for the file share. Required. + :vartype provisioning_constants: ~azure.mgmt.fileshares.models.FileShareProvisioningConstants + """ + + limits: "_models.FileShareLimits" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The limits for the file share. Required.""" + provisioning_constants: "_models.FileShareProvisioningConstants" = rest_field( + name="provisioningConstants", visibility=["read", "create", "update", "delete", "query"] + ) + """The provisioning constants for the file share. Required.""" + + @overload + def __init__( + self, + *, + limits: "_models.FileShareLimits", + provisioning_constants: "_models.FileShareProvisioningConstants", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareLimitsResponse(_Model): + """Response structure for file share limits API. + + :ivar properties: The properties of the file share limits. Required. + :vartype properties: ~azure.mgmt.fileshares.models.FileShareLimitsOutput + """ + + properties: "_models.FileShareLimitsOutput" = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The properties of the file share limits. Required.""" + + @overload + def __init__( + self, + *, + properties: "_models.FileShareLimitsOutput", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareProperties(_Model): + """File share properties. + + :ivar mount_name: The name of the file share as seen by the end user when mounting the share, + such as in a URI or UNC format in their operating system. + :vartype mount_name: str + :ivar host_name: The host name of the file share. + :vartype host_name: str + :ivar media_tier: The storage media tier of the file share. "SSD" + :vartype media_tier: str or ~azure.mgmt.fileshares.models.MediaTier + :ivar redundancy: The chosen redundancy level of the file share. Known values are: "Local" and + "Zone". + :vartype redundancy: str or ~azure.mgmt.fileshares.models.Redundancy + :ivar protocol: The file sharing protocol for this file share. "NFS" + :vartype protocol: str or ~azure.mgmt.fileshares.models.Protocol + :ivar provisioned_storage_gi_b: The provisioned storage size of the share in GiB (1 GiB is + 1024^3 bytes or 1073741824 bytes). 
A component of the file share's bill is the provisioned + storage, regardless of the amount of used storage. + :vartype provisioned_storage_gi_b: int + :ivar provisioned_storage_next_allowed_downgrade: A date/time value that specifies when the + provisioned storage for the file share is permitted to be reduced. + :vartype provisioned_storage_next_allowed_downgrade: ~datetime.datetime + :ivar provisioned_io_per_sec: The provisioned IO / sec of the share. + :vartype provisioned_io_per_sec: int + :ivar provisioned_io_per_sec_next_allowed_downgrade: A date/time value that specifies when the + provisioned IOPS for the file share is permitted to be reduced. + :vartype provisioned_io_per_sec_next_allowed_downgrade: ~datetime.datetime + :ivar provisioned_throughput_mi_b_per_sec: The provisioned throughput / sec of the share. + :vartype provisioned_throughput_mi_b_per_sec: int + :ivar provisioned_throughput_next_allowed_downgrade: A date/time value that specifies when the + provisioned throughput for the file share is permitted to be reduced. + :vartype provisioned_throughput_next_allowed_downgrade: ~datetime.datetime + :ivar included_burst_io_per_sec: Burst IOPS are extra buffer IOPS enabling you to consume more + than your provisioned IOPS for a short period of time, depending on the burst credits available + for your share. + :vartype included_burst_io_per_sec: int + :ivar max_burst_io_per_sec_credits: Max burst IOPS credits shows the maximum number of burst + credits the share can have at the current IOPS provisioning level. + :vartype max_burst_io_per_sec_credits: int + :ivar nfs_protocol_properties: Protocol settings specific NFS. + :vartype nfs_protocol_properties: ~azure.mgmt.fileshares.models.NfsProtocolProperties + :ivar public_access_properties: The set of properties for control public access. + :vartype public_access_properties: ~azure.mgmt.fileshares.models.PublicAccessProperties + :ivar provisioning_state: The status of the last operation. 
Known values are: "Succeeded", + "Failed", "Canceled", "Provisioning", "Updating", "Deleting", "Accepted", "Created", + "TransientFailure", "Creating", "Patching", and "Posting". + :vartype provisioning_state: str or ~azure.mgmt.fileshares.models.FileShareProvisioningState + :ivar public_network_access: Gets or sets allow or disallow public network access to azure + managed file share. Known values are: "Enabled" and "Disabled". + :vartype public_network_access: str or ~azure.mgmt.fileshares.models.PublicNetworkAccess + :ivar private_endpoint_connections: The list of associated private endpoint connections. + :vartype private_endpoint_connections: + list[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + """ + + mount_name: Optional[str] = rest_field(name="mountName", visibility=["read", "create"]) + """The name of the file share as seen by the end user when mounting the share, such as in a URI or + UNC format in their operating system.""" + host_name: Optional[str] = rest_field(name="hostName", visibility=["read"]) + """The host name of the file share.""" + media_tier: Optional[Union[str, "_models.MediaTier"]] = rest_field(name="mediaTier", visibility=["read", "create"]) + """The storage media tier of the file share. \"SSD\"""" + redundancy: Optional[Union[str, "_models.Redundancy"]] = rest_field(visibility=["read", "create"]) + """The chosen redundancy level of the file share. Known values are: \"Local\" and \"Zone\".""" + protocol: Optional[Union[str, "_models.Protocol"]] = rest_field(visibility=["read", "create"]) + """The file sharing protocol for this file share. \"NFS\"""" + provisioned_storage_gi_b: Optional[int] = rest_field( + name="provisionedStorageGiB", visibility=["read", "create", "update"] + ) + """The provisioned storage size of the share in GiB (1 GiB is 1024^3 bytes or 1073741824 bytes). 
A + component of the file share's bill is the provisioned storage, regardless of the amount of used + storage.""" + provisioned_storage_next_allowed_downgrade: Optional[datetime.datetime] = rest_field( + name="provisionedStorageNextAllowedDowngrade", visibility=["read"], format="rfc3339" + ) + """A date/time value that specifies when the provisioned storage for the file share is permitted + to be reduced.""" + provisioned_io_per_sec: Optional[int] = rest_field( + name="provisionedIOPerSec", visibility=["read", "create", "update"] + ) + """The provisioned IO / sec of the share.""" + provisioned_io_per_sec_next_allowed_downgrade: Optional[datetime.datetime] = rest_field( + name="provisionedIOPerSecNextAllowedDowngrade", visibility=["read"], format="rfc3339" + ) + """A date/time value that specifies when the provisioned IOPS for the file share is permitted to + be reduced.""" + provisioned_throughput_mi_b_per_sec: Optional[int] = rest_field( + name="provisionedThroughputMiBPerSec", visibility=["read", "create", "update"] + ) + """The provisioned throughput / sec of the share.""" + provisioned_throughput_next_allowed_downgrade: Optional[datetime.datetime] = rest_field( + name="provisionedThroughputNextAllowedDowngrade", visibility=["read"], format="rfc3339" + ) + """A date/time value that specifies when the provisioned throughput for the file share is + permitted to be reduced.""" + included_burst_io_per_sec: Optional[int] = rest_field(name="includedBurstIOPerSec", visibility=["read"]) + """Burst IOPS are extra buffer IOPS enabling you to consume more than your provisioned IOPS for a + short period of time, depending on the burst credits available for your share.""" + max_burst_io_per_sec_credits: Optional[int] = rest_field(name="maxBurstIOPerSecCredits", visibility=["read"]) + """Max burst IOPS credits shows the maximum number of burst credits the share can have at the + current IOPS provisioning level.""" + nfs_protocol_properties: 
Optional["_models.NfsProtocolProperties"] = rest_field( + name="nfsProtocolProperties", visibility=["read", "create", "update"] + ) + """Protocol settings specific NFS.""" + public_access_properties: Optional["_models.PublicAccessProperties"] = rest_field( + name="publicAccessProperties", visibility=["read", "create", "update"] + ) + """The set of properties for control public access.""" + provisioning_state: Optional[Union[str, "_models.FileShareProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The status of the last operation. Known values are: \"Succeeded\", \"Failed\", \"Canceled\", + \"Provisioning\", \"Updating\", \"Deleting\", \"Accepted\", \"Created\", \"TransientFailure\", + \"Creating\", \"Patching\", and \"Posting\".""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["read", "create", "update"] + ) + """Gets or sets allow or disallow public network access to azure managed file share. Known values + are: \"Enabled\" and \"Disabled\".""" + private_endpoint_connections: Optional[list["_models.PrivateEndpointConnection"]] = rest_field( + name="privateEndpointConnections", visibility=["read"] + ) + """The list of associated private endpoint connections.""" + + @overload + def __init__( + self, + *, + mount_name: Optional[str] = None, + media_tier: Optional[Union[str, "_models.MediaTier"]] = None, + redundancy: Optional[Union[str, "_models.Redundancy"]] = None, + protocol: Optional[Union[str, "_models.Protocol"]] = None, + provisioned_storage_gi_b: Optional[int] = None, + provisioned_io_per_sec: Optional[int] = None, + provisioned_throughput_mi_b_per_sec: Optional[int] = None, + nfs_protocol_properties: Optional["_models.NfsProtocolProperties"] = None, + public_access_properties: Optional["_models.PublicAccessProperties"] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareProvisioningConstants(_Model): + """Constants used for calculating recommended values of file share provisioning properties. + + :ivar base_io_per_sec: Base IO per second. Required. + :vartype base_io_per_sec: int + :ivar scalar_io_per_sec: Scalar IO per second. Required. + :vartype scalar_io_per_sec: float + :ivar base_throughput_mi_b_per_sec: Base throughput in MiB per second. Required. + :vartype base_throughput_mi_b_per_sec: int + :ivar scalar_throughput_mi_b_per_sec: Scalar throughput in MiB per second. Required. + :vartype scalar_throughput_mi_b_per_sec: float + :ivar guardrail_io_per_sec_scalar: Guardrail scalar IO per second. Required. + :vartype guardrail_io_per_sec_scalar: float + :ivar guardrail_throughput_scalar: Guardrail scalar throughput in MiB per second. Required. + :vartype guardrail_throughput_scalar: float + """ + + base_io_per_sec: int = rest_field(name="baseIOPerSec", visibility=["read", "create", "update", "delete", "query"]) + """Base IO per second. Required.""" + scalar_io_per_sec: float = rest_field( + name="scalarIOPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """Scalar IO per second. Required.""" + base_throughput_mi_b_per_sec: int = rest_field( + name="baseThroughputMiBPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """Base throughput in MiB per second. Required.""" + scalar_throughput_mi_b_per_sec: float = rest_field( + name="scalarThroughputMiBPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """Scalar throughput in MiB per second. 
Required.""" + guardrail_io_per_sec_scalar: float = rest_field( + name="guardrailIOPerSecScalar", visibility=["read", "create", "update", "delete", "query"] + ) + """Guardrail scalar IO per second. Required.""" + guardrail_throughput_scalar: float = rest_field( + name="guardrailThroughputScalar", visibility=["read", "create", "update", "delete", "query"] + ) + """Guardrail scalar throughput in MiB per second. Required.""" + + @overload + def __init__( + self, + *, + base_io_per_sec: int, + scalar_io_per_sec: float, + base_throughput_mi_b_per_sec: int, + scalar_throughput_mi_b_per_sec: float, + guardrail_io_per_sec_scalar: float, + guardrail_throughput_scalar: float, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareProvisioningRecommendationInput(_Model): + """File share provisioning parameters recommendation API input structure. + + :ivar provisioned_storage_gi_b: The desired provisioned storage size of the share in GiB. Will + be use to calculate the values of remaining provisioning parameters. Required. + :vartype provisioned_storage_gi_b: int + """ + + provisioned_storage_gi_b: int = rest_field( + name="provisionedStorageGiB", visibility=["read", "create", "update", "delete", "query"] + ) + """The desired provisioned storage size of the share in GiB. Will be use to calculate the values + of remaining provisioning parameters. Required.""" + + @overload + def __init__( + self, + *, + provisioned_storage_gi_b: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareProvisioningRecommendationOutput(_Model): # pylint: disable=name-too-long + """File share provisioning parameters recommendation API result. + + :ivar provisioned_io_per_sec: The recommended value of provisioned IO / sec of the share. + Required. + :vartype provisioned_io_per_sec: int + :ivar provisioned_throughput_mi_b_per_sec: The recommended value of provisioned throughput / + sec of the share. Required. + :vartype provisioned_throughput_mi_b_per_sec: int + :ivar available_redundancy_options: Redundancy options for the share. Required. + :vartype available_redundancy_options: list[str or ~azure.mgmt.fileshares.models.Redundancy] + """ + + provisioned_io_per_sec: int = rest_field( + name="provisionedIOPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """The recommended value of provisioned IO / sec of the share. Required.""" + provisioned_throughput_mi_b_per_sec: int = rest_field( + name="provisionedThroughputMiBPerSec", visibility=["read", "create", "update", "delete", "query"] + ) + """The recommended value of provisioned throughput / sec of the share. Required.""" + available_redundancy_options: list[Union[str, "_models.Redundancy"]] = rest_field( + name="availableRedundancyOptions", visibility=["read", "create", "update", "delete", "query"] + ) + """Redundancy options for the share. Required.""" + + @overload + def __init__( + self, + *, + provisioned_io_per_sec: int, + provisioned_throughput_mi_b_per_sec: int, + available_redundancy_options: list[Union[str, "_models.Redundancy"]], + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareProvisioningRecommendationRequest(_Model): # pylint: disable=name-too-long + """Request structure for file share provisioning parameters recommendation API. + + :ivar properties: The properties of the file share provisioning recommendation input. Required. + :vartype properties: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationInput + """ + + properties: "_models.FileShareProvisioningRecommendationInput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of the file share provisioning recommendation input. Required.""" + + @overload + def __init__( + self, + *, + properties: "_models.FileShareProvisioningRecommendationInput", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareProvisioningRecommendationResponse(_Model): # pylint: disable=name-too-long + """Response structure for file share provisioning parameters recommendation API. + + :ivar properties: The properties of the file share provisioning recommendation output. + Required. + :vartype properties: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationOutput + """ + + properties: "_models.FileShareProvisioningRecommendationOutput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of the file share provisioning recommendation output. Required.""" + + @overload + def __init__( + self, + *, + properties: "_models.FileShareProvisioningRecommendationOutput", + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class ProxyResource(Resource): + """Proxy Resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + """ + + +class FileShareSnapshot(ProxyResource): + """FileShareSnapshot resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + :ivar properties: The resource-specific properties for this resource. 
+ :vartype properties: ~azure.mgmt.fileshares.models.FileShareSnapshotProperties + """ + + properties: Optional["_models.FileShareSnapshotProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.FileShareSnapshotProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareSnapshotProperties(_Model): + """FileShareSnapshot properties. + + :ivar snapshot_time: The FileShareSnapshot time in UTC in string representation. + :vartype snapshot_time: str + :ivar initiator_id: The initiator of the FileShareSnapshot. This is a user-defined value. + :vartype initiator_id: str + :ivar metadata: The metadata. + :vartype metadata: dict[str, str] + """ + + snapshot_time: Optional[str] = rest_field(name="snapshotTime", visibility=["read"]) + """The FileShareSnapshot time in UTC in string representation.""" + initiator_id: Optional[str] = rest_field(name="initiatorId", visibility=["read", "create"]) + """The initiator of the FileShareSnapshot. This is a user-defined value.""" + metadata: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update"]) + """The metadata.""" + + @overload + def __init__( + self, + *, + initiator_id: Optional[str] = None, + metadata: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareSnapshotUpdate(_Model): + """The type used for update operations of the FileShareSnapshot. + + :ivar properties: The resource-specific properties for this resource. + :vartype properties: ~azure.mgmt.fileshares.models.FileShareSnapshotUpdateProperties + """ + + properties: Optional["_models.FileShareSnapshotUpdateProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.FileShareSnapshotUpdateProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareSnapshotUpdateProperties(_Model): + """The updatable properties of the FileShareSnapshot. + + :ivar metadata: The metadata. + :vartype metadata: dict[str, str] + """ + + metadata: Optional[dict[str, str]] = rest_field(visibility=["update"]) + """The metadata.""" + + @overload + def __init__( + self, + *, + metadata: Optional[dict[str, str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareUpdate(_Model): + """The type used for update operations of the FileShare. + + :ivar tags: Resource tags. + :vartype tags: dict[str, str] + :ivar properties: The resource-specific properties for this resource. 
+ :vartype properties: ~azure.mgmt.fileshares.models.FileShareUpdateProperties + """ + + tags: Optional[dict[str, str]] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """Resource tags.""" + properties: Optional["_models.FileShareUpdateProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The resource-specific properties for this resource.""" + + @overload + def __init__( + self, + *, + tags: Optional[dict[str, str]] = None, + properties: Optional["_models.FileShareUpdateProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareUpdateProperties(_Model): + """The updatable properties of the FileShare. + + :ivar provisioned_storage_gi_b: The provisioned storage size of the share in GiB (1 GiB is + 1024^3 bytes or 1073741824 bytes). A component of the file share's bill is the provisioned + storage, regardless of the amount of used storage. + :vartype provisioned_storage_gi_b: int + :ivar provisioned_io_per_sec: The provisioned IO / sec of the share. + :vartype provisioned_io_per_sec: int + :ivar provisioned_throughput_mi_b_per_sec: The provisioned throughput / sec of the share. + :vartype provisioned_throughput_mi_b_per_sec: int + :ivar nfs_protocol_properties: Protocol settings specific NFS. + :vartype nfs_protocol_properties: ~azure.mgmt.fileshares.models.NfsProtocolProperties + :ivar public_access_properties: The set of properties for control public access. + :vartype public_access_properties: ~azure.mgmt.fileshares.models.PublicAccessProperties + :ivar public_network_access: Gets or sets allow or disallow public network access to azure + managed file share. Known values are: "Enabled" and "Disabled". 
+ :vartype public_network_access: str or ~azure.mgmt.fileshares.models.PublicNetworkAccess + """ + + provisioned_storage_gi_b: Optional[int] = rest_field(name="provisionedStorageGiB", visibility=["update"]) + """The provisioned storage size of the share in GiB (1 GiB is 1024^3 bytes or 1073741824 bytes). A + component of the file share's bill is the provisioned storage, regardless of the amount of used + storage.""" + provisioned_io_per_sec: Optional[int] = rest_field(name="provisionedIOPerSec", visibility=["update"]) + """The provisioned IO / sec of the share.""" + provisioned_throughput_mi_b_per_sec: Optional[int] = rest_field( + name="provisionedThroughputMiBPerSec", visibility=["update"] + ) + """The provisioned throughput / sec of the share.""" + nfs_protocol_properties: Optional["_models.NfsProtocolProperties"] = rest_field( + name="nfsProtocolProperties", visibility=["update"] + ) + """Protocol settings specific NFS.""" + public_access_properties: Optional["_models.PublicAccessProperties"] = rest_field( + name="publicAccessProperties", visibility=["update"] + ) + """The set of properties for control public access.""" + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = rest_field( + name="publicNetworkAccess", visibility=["update"] + ) + """Gets or sets allow or disallow public network access to azure managed file share. Known values + are: \"Enabled\" and \"Disabled\".""" + + @overload + def __init__( + self, + *, + provisioned_storage_gi_b: Optional[int] = None, + provisioned_io_per_sec: Optional[int] = None, + provisioned_throughput_mi_b_per_sec: Optional[int] = None, + nfs_protocol_properties: Optional["_models.NfsProtocolProperties"] = None, + public_access_properties: Optional["_models.PublicAccessProperties"] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccess"]] = None, + ) -> None: ... 
+ + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareUsageDataOutput(_Model): + """File shares usage result. + + :ivar live_shares: File share usage data for active file shares. Required. + :vartype live_shares: ~azure.mgmt.fileshares.models.LiveSharesUsageData + """ + + live_shares: "_models.LiveSharesUsageData" = rest_field( + name="liveShares", visibility=["read", "create", "update", "delete", "query"] + ) + """File share usage data for active file shares. Required.""" + + @overload + def __init__( + self, + *, + live_shares: "_models.LiveSharesUsageData", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class FileShareUsageDataResponse(_Model): + """Response structure for file shares usage in the specified subscription/location. + + :ivar properties: The properties of the file share usage data. Required. + :vartype properties: ~azure.mgmt.fileshares.models.FileShareUsageDataOutput + """ + + properties: "_models.FileShareUsageDataOutput" = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """The properties of the file share usage data. Required.""" + + @overload + def __init__( + self, + *, + properties: "_models.FileShareUsageDataOutput", + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class LiveSharesUsageData(_Model): + """Usage data for live shares. + + :ivar file_share_count: The number of active file shares. Required. + :vartype file_share_count: int + """ + + file_share_count: int = rest_field( + name="fileShareCount", visibility=["read", "create", "update", "delete", "query"] + ) + """The number of active file shares. Required.""" + + @overload + def __init__( + self, + *, + file_share_count: int, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class NfsProtocolProperties(_Model): + """Properties specific to the NFS protocol. + + :ivar root_squash: Root squash defines how root users on clients are mapped to the NFS share. + Known values are: "NoRootSquash", "RootSquash", and "AllSquash". + :vartype root_squash: str or ~azure.mgmt.fileshares.models.ShareRootSquash + :ivar encryption_in_transit_required: Encryption in transit defines whether data is encrypted + for NFS shares. Known values are: "Enabled" and "Disabled". + :vartype encryption_in_transit_required: str or + ~azure.mgmt.fileshares.models.EncryptionInTransitRequired + """ + + root_squash: Optional[Union[str, "_models.ShareRootSquash"]] = rest_field( + name="rootSquash", visibility=["read", "create", "update", "delete", "query"] + ) + """Root squash defines how root users on clients are mapped to the NFS share. 
Known values are: + \"NoRootSquash\", \"RootSquash\", and \"AllSquash\".""" + encryption_in_transit_required: Optional[Union[str, "_models.EncryptionInTransitRequired"]] = rest_field( + name="encryptionInTransitRequired", visibility=["read", "create", "update", "delete", "query"] + ) + """Encryption in transit defines whether data is encrypted for NFS shares. Known values are: + \"Enabled\" and \"Disabled\".""" + + @overload + def __init__( + self, + *, + root_squash: Optional[Union[str, "_models.ShareRootSquash"]] = None, + encryption_in_transit_required: Optional[Union[str, "_models.EncryptionInTransitRequired"]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class Operation(_Model): + """REST API Operation. + + :ivar name: The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + "Microsoft.Compute/virtualMachines/write", "Microsoft.Compute/virtualMachines/capture/action". + :vartype name: str + :ivar is_data_action: Whether the operation applies to data-plane. This is "true" for + data-plane operations and "false" for Azure Resource Manager/control-plane operations. + :vartype is_data_action: bool + :ivar display: Localized display information for this particular operation. + :vartype display: ~azure.mgmt.fileshares.models.OperationDisplay + :ivar origin: The intended executor of the operation; as in Resource Based Access Control + (RBAC) and audit logs UX. Default value is "user,system". Known values are: "user", "system", + and "user,system". + :vartype origin: str or ~azure.mgmt.fileshares.models.Origin + :ivar action_type: Extensible enum. Indicates the action type. "Internal" refers to actions + that are for internal only APIs. 
"Internal" + :vartype action_type: str or ~azure.mgmt.fileshares.models.ActionType + """ + + name: Optional[str] = rest_field(visibility=["read"]) + """The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + \"Microsoft.Compute/virtualMachines/write\", + \"Microsoft.Compute/virtualMachines/capture/action\".""" + is_data_action: Optional[bool] = rest_field(name="isDataAction", visibility=["read"]) + """Whether the operation applies to data-plane. This is \"true\" for data-plane operations and + \"false\" for Azure Resource Manager/control-plane operations.""" + display: Optional["_models.OperationDisplay"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Localized display information for this particular operation.""" + origin: Optional[Union[str, "_models.Origin"]] = rest_field(visibility=["read"]) + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is \"user,system\". Known values are: \"user\", \"system\", and + \"user,system\".""" + action_type: Optional[Union[str, "_models.ActionType"]] = rest_field(name="actionType", visibility=["read"]) + """Extensible enum. Indicates the action type. \"Internal\" refers to actions that are for + internal only APIs. \"Internal\"""" + + @overload + def __init__( + self, + *, + display: Optional["_models.OperationDisplay"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class OperationDisplay(_Model): + """Localized display information for an operation. + + :ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft + Monitoring Insights" or "Microsoft Compute". 
+ :vartype provider: str + :ivar resource: The localized friendly name of the resource type related to this operation. + E.g. "Virtual Machines" or "Job Schedule Collections". + :vartype resource: str + :ivar operation: The concise, localized friendly name for the operation; suitable for + dropdowns. E.g. "Create or Update Virtual Machine", "Restart Virtual Machine". + :vartype operation: str + :ivar description: The short, localized friendly description of the operation; suitable for + tool tips and detailed views. + :vartype description: str + """ + + provider: Optional[str] = rest_field(visibility=["read"]) + """The localized friendly form of the resource provider name, e.g. \"Microsoft Monitoring + Insights\" or \"Microsoft Compute\".""" + resource: Optional[str] = rest_field(visibility=["read"]) + """The localized friendly name of the resource type related to this operation. E.g. \"Virtual + Machines\" or \"Job Schedule Collections\".""" + operation: Optional[str] = rest_field(visibility=["read"]) + """The concise, localized friendly name for the operation; suitable for dropdowns. E.g. \"Create + or Update Virtual Machine\", \"Restart Virtual Machine\".""" + description: Optional[str] = rest_field(visibility=["read"]) + """The short, localized friendly description of the operation; suitable for tool tips and detailed + views.""" + + +class PrivateEndpoint(_Model): + """The private endpoint resource. + + :ivar id: The resource identifier of the private endpoint. + :vartype id: str + """ + + id: Optional[str] = rest_field(visibility=["read"]) + """The resource identifier of the private endpoint.""" + + +class PrivateEndpointConnection(Resource): + """The private endpoint connection resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. 
+ :vartype name: str + :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + :ivar properties: Resource properties. + :vartype properties: ~azure.mgmt.fileshares.models.PrivateEndpointConnectionProperties + """ + + properties: Optional["_models.PrivateEndpointConnectionProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Resource properties.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateEndpointConnectionProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateEndpointConnectionProperties(_Model): + """Properties of the private endpoint connection. + + :ivar group_ids: The group ids for the private endpoint resource. + :vartype group_ids: list[str] + :ivar private_endpoint: The private endpoint resource. + :vartype private_endpoint: ~azure.mgmt.fileshares.models.PrivateEndpoint + :ivar private_link_service_connection_state: A collection of information about the state of the + connection between service consumer and provider. Required. + :vartype private_link_service_connection_state: + ~azure.mgmt.fileshares.models.PrivateLinkServiceConnectionState + :ivar provisioning_state: The provisioning state of the private endpoint connection resource. + Known values are: "Succeeded", "Creating", "Deleting", and "Failed". 
+ :vartype provisioning_state: str or + ~azure.mgmt.fileshares.models.PrivateEndpointConnectionProvisioningState + """ + + group_ids: Optional[list[str]] = rest_field(name="groupIds", visibility=["read"]) + """The group ids for the private endpoint resource.""" + private_endpoint: Optional["_models.PrivateEndpoint"] = rest_field( + name="privateEndpoint", visibility=["read", "create", "update", "delete", "query"] + ) + """The private endpoint resource.""" + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState" = rest_field( + name="privateLinkServiceConnectionState", visibility=["read", "create", "update", "delete", "query"] + ) + """A collection of information about the state of the connection between service consumer and + provider. Required.""" + provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = rest_field( + name="provisioningState", visibility=["read"] + ) + """The provisioning state of the private endpoint connection resource. Known values are: + \"Succeeded\", \"Creating\", \"Deleting\", and \"Failed\".""" + + @overload + def __init__( + self, + *, + private_link_service_connection_state: "_models.PrivateLinkServiceConnectionState", + private_endpoint: Optional["_models.PrivateEndpoint"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkResource(ProxyResource): + """A private link resource. + + :ivar id: Fully qualified resource ID for the resource. Ex - + /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. + :vartype id: str + :ivar name: The name of the resource. + :vartype name: str + :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or + "Microsoft.Storage/storageAccounts". + :vartype type: str + :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy + information. + :vartype system_data: ~azure.mgmt.fileshares.models.SystemData + :ivar properties: Resource properties. + :vartype properties: ~azure.mgmt.fileshares.models.PrivateLinkResourceProperties + """ + + properties: Optional["_models.PrivateLinkResourceProperties"] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Resource properties.""" + + @overload + def __init__( + self, + *, + properties: Optional["_models.PrivateLinkResourceProperties"] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PrivateLinkResourceProperties(_Model): + """Properties of a private link resource. + + :ivar group_id: The private link resource group id. + :vartype group_id: str + :ivar required_members: The private link resource required member names. + :vartype required_members: list[str] + :ivar required_zone_names: The private link resource private link DNS zone name. + :vartype required_zone_names: list[str] + """ + + group_id: Optional[str] = rest_field(name="groupId", visibility=["read"]) + """The private link resource group id.""" + required_members: Optional[list[str]] = rest_field(name="requiredMembers", visibility=["read"]) + """The private link resource required member names.""" + required_zone_names: Optional[list[str]] = rest_field(name="requiredZoneNames", visibility=["read"]) + """The private link resource private link DNS zone name.""" + + +class PrivateLinkServiceConnectionState(_Model): + """A collection of information about the state of the connection between service consumer and + provider. 
+ + :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner + of the service. Known values are: "Pending", "Approved", and "Rejected". + :vartype status: str or ~azure.mgmt.fileshares.models.PrivateEndpointServiceConnectionStatus + :ivar description: The reason for approval/rejection of the connection. + :vartype description: str + :ivar actions_required: A message indicating if changes on the service provider require any + updates on the consumer. + :vartype actions_required: str + """ + + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = rest_field( + visibility=["read", "create", "update", "delete", "query"] + ) + """Indicates whether the connection has been Approved/Rejected/Removed by the owner of the + service. Known values are: \"Pending\", \"Approved\", and \"Rejected\".""" + description: Optional[str] = rest_field(visibility=["read", "create", "update", "delete", "query"]) + """The reason for approval/rejection of the connection.""" + actions_required: Optional[str] = rest_field( + name="actionsRequired", visibility=["read", "create", "update", "delete", "query"] + ) + """A message indicating if changes on the service provider require any updates on the consumer.""" + + @overload + def __init__( + self, + *, + status: Optional[Union[str, "_models.PrivateEndpointServiceConnectionStatus"]] = None, + description: Optional[str] = None, + actions_required: Optional[str] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class PublicAccessProperties(_Model): + """The set of properties for control public access. + + :ivar allowed_subnets: The allowed set of subnets when access is restricted. 
+ :vartype allowed_subnets: list[str] + """ + + allowed_subnets: Optional[list[str]] = rest_field( + name="allowedSubnets", visibility=["read", "create", "update", "delete", "query"] + ) + """The allowed set of subnets when access is restricted.""" + + @overload + def __init__( + self, + *, + allowed_subnets: Optional[list[str]] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. + :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + +class SystemData(_Model): + """Metadata pertaining to creation and last modification of the resource. + + :ivar created_by: The identity that created the resource. + :vartype created_by: str + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". + :vartype created_by_type: str or ~azure.mgmt.fileshares.models.CreatedByType + :ivar created_at: The timestamp of resource creation (UTC). + :vartype created_at: ~datetime.datetime + :ivar last_modified_by: The identity that last modified the resource. + :vartype last_modified_by: str + :ivar last_modified_by_type: The type of identity that last modified the resource. Known values + are: "User", "Application", "ManagedIdentity", and "Key". + :vartype last_modified_by_type: str or ~azure.mgmt.fileshares.models.CreatedByType + :ivar last_modified_at: The timestamp of resource last modification (UTC). 
+ :vartype last_modified_at: ~datetime.datetime + """ + + created_by: Optional[str] = rest_field(name="createdBy", visibility=["read", "create", "update", "delete", "query"]) + """The identity that created the resource.""" + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="createdByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that created the resource. Known values are: \"User\", \"Application\", + \"ManagedIdentity\", and \"Key\".""" + created_at: Optional[datetime.datetime] = rest_field( + name="createdAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource creation (UTC).""" + last_modified_by: Optional[str] = rest_field( + name="lastModifiedBy", visibility=["read", "create", "update", "delete", "query"] + ) + """The identity that last modified the resource.""" + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = rest_field( + name="lastModifiedByType", visibility=["read", "create", "update", "delete", "query"] + ) + """The type of identity that last modified the resource. Known values are: \"User\", + \"Application\", \"ManagedIdentity\", and \"Key\".""" + last_modified_at: Optional[datetime.datetime] = rest_field( + name="lastModifiedAt", visibility=["read", "create", "update", "delete", "query"], format="rfc3339" + ) + """The timestamp of resource last modification (UTC).""" + + @overload + def __init__( + self, + *, + created_by: Optional[str] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + created_at: Optional[datetime.datetime] = None, + last_modified_by: Optional[str] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, + last_modified_at: Optional[datetime.datetime] = None, + ) -> None: ... + + @overload + def __init__(self, mapping: Mapping[str, Any]) -> None: + """ + :param mapping: raw JSON to initialize the model. 
+ :type mapping: Mapping[str, Any] + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_patch.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_patch.py new file mode 100644 index 000000000000..ea765788358a --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/models/_patch.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/__init__.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/__init__.py new file mode 100644 index 000000000000..07fba52bd74b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/__init__.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +# pylint: disable=wrong-import-position + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from ._patch import * # pylint: disable=unused-wildcard-import + +from ._operations import FileSharesOperations # type: ignore +from ._operations import FileShareSnapshotsOperations # type: ignore +from ._operations import Operations # type: ignore +from ._operations import InformationalOperationsOperations # type: ignore +from ._operations import PrivateEndpointConnectionsOperations # type: ignore +from ._operations import PrivateLinkResourcesOperations # type: ignore + +from ._patch import __all__ as _patch_all +from ._patch import * +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "FileSharesOperations", + "FileShareSnapshotsOperations", + "Operations", + "InformationalOperationsOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", +] +__all__.extend([p for p in _patch_all if p not in __all__]) # pyright: ignore +_patch_sdk() diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_operations.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_operations.py new file mode 100644 index 000000000000..8884f760816b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_operations.py @@ -0,0 +1,3807 @@ +# pylint: disable=line-too-long,useless-suppression,too-many-lines +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +from collections.abc import MutableMapping +from io import IOBase +import json +from typing import Any, Callable, IO, Iterator, Optional, TypeVar, Union, cast, overload +import urllib.parse + +from azure.core import PipelineClient +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + StreamClosedError, + StreamConsumedError, + map_error, +) +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.core.rest import HttpRequest, HttpResponse +from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models as _models +from .._configuration import FileSharesMgmtClientConfiguration +from .._utils.model_base import SdkJSONEncoder, _deserialize, _failsafe_deserialize +from .._utils.serialization import Deserializer, Serializer +from .._validation import api_version_validation + +JSON = MutableMapping[str, Any] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, dict[str, Any]], Any]] +List = list + +_SERIALIZER = Serializer() +_SERIALIZER.client_side_validation = False + + +def build_file_shares_get_request( + resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_create_or_update_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + 
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_update_request( + resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_delete_request( + resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + # Construct URL + _url = 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_file_shares_list_by_subscription_request( # pylint: disable=name-too-long + subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.FileShares/fileShares" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_list_by_parent_request( + resource_group_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = ( + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares" + ) + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_shares_check_name_availability_request( # pylint: disable=name-too-long + location: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.FileShares/locations/{location}/checkNameAvailability" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "location": _SERIALIZER.url("location", location, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, 
"str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_share_snapshots_get_file_share_snapshot_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/fileShareSnapshots/{name}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_share_snapshots_create_or_update_file_share_snapshot_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", 
"2026-06-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/fileShareSnapshots/{name}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_share_snapshots_update_file_share_snapshot_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/fileShareSnapshots/{name}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + "name": 
_SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_file_share_snapshots_delete_file_share_snapshot_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/fileShareSnapshots/{name}" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + "name": _SERIALIZER.url("name", name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) + + +def build_file_share_snapshots_list_by_file_share_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: 
str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/fileShareSnapshots" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_operations_list_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/providers/Microsoft.FileShares/operations" + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_informational_operations_get_usage_data_request( # pylint: disable=name-too-long + location: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.FileShares/locations/{location}/getUsageData" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "location": _SERIALIZER.url("location", location, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_informational_operations_get_limits_request( # pylint: disable=name-too-long + location: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/providers/Microsoft.FileShares/locations/{location}/getLimits" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "location": _SERIALIZER.url("location", location, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_informational_operations_get_provisioning_recommendation_request( # pylint: disable=name-too-long + location: str, subscription_id: 
def build_private_endpoint_connections_get_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single private endpoint connection on a FileShare."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
        "privateEndpointConnectionName": _SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_private_endpoint_connections_create_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the PUT request that creates/approves a private endpoint connection."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
        "privateEndpointConnectionName": _SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    if content_type is not None:
        _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)


def build_private_endpoint_connections_delete_request(  # pylint: disable=name-too-long
    resource_group_name: str,
    resource_name: str,
    private_endpoint_connection_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the DELETE request for a private endpoint connection (no Accept: no response body)."""
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01"))

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/privateEndpointConnections/{privateEndpointConnectionName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
        "privateEndpointConnectionName": _SERIALIZER.url(
            "private_endpoint_connection_name", private_endpoint_connection_name, "str"
        ),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs)


def build_private_endpoint_connections_list_by_file_share_request(  # pylint: disable=name-too-long
    resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request listing the private endpoint connections of one FileShare."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/privateEndpointConnections"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)


def build_private_link_resources_get_request(
    resource_group_name: str, resource_name: str, private_link_resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    """Build the GET request for a single private link resource of a FileShare."""
    _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01"))
    accept = _headers.pop("Accept", "application/json")

    # Construct URL
    _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/privateLinkResources/{privateLinkResourceName}"
    path_format_arguments = {
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
        "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"),
        "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"),
        "privateLinkResourceName": _SERIALIZER.url("private_link_resource_name", private_link_resource_name, "str"),
    }

    _url: str = _url.format(**path_format_arguments)  # type: ignore

    # Construct parameters
    _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")

    # Construct headers
    _headers["Accept"] = _SERIALIZER.header("accept", accept, "str")

    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
"privateLinkResourceName": _SERIALIZER.url("private_link_resource_name", private_link_resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_private_link_resources_list_request( # pylint: disable=name-too-long + resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2026-06-01")) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.FileShares/fileShares/{resourceName}/privateLinkResources" + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), + "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), + "resourceName": _SERIALIZER.url("resource_name", resource_name, "str"), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class FileSharesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.FileSharesMgmtClient`'s + :attr:`file_shares` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.FileShare: + """Get a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: FileShare. The FileShare is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShare + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + + _request = build_file_shares_get_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = 
kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShare, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_initial( + self, + resource_group_name: str, + resource_name: str, + resource: Union[_models.FileShare, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_create_or_update_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + 
subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 201]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 201: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: _models.FileShare, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.fileshares.models.FileShare + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShare. 
The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, + resource_group_name: str, + resource_name: str, + resource: Union[_models.FileShare, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Create or update a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param resource: Resource create parameters. Is one of the following types: FileShare, JSON, + IO[bytes] Required. 
+ :type resource: ~azure.mgmt.fileshares.models.FileShare or JSON or IO[bytes] + :return: An instance of LROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.FileShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.FileShare].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + 
client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.FileShare]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_initial( + self, + resource_group_name: str, + resource_name: str, + properties: Union[_models.FileShareUpdate, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_update_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and 
close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: _models.FileShareUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.fileshares.models.FileShareUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShare. 
The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShare. The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update( + self, + resource_group_name: str, + resource_name: str, + properties: Union[_models.FileShareUpdate, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.FileShare]: + """Update a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param properties: The resource properties to be updated. Is one of the following types: + FileShareUpdate, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.fileshares.models.FileShareUpdate or JSON or IO[bytes] + :return: An instance of LROPoller that returns FileShare. 
The FileShare is compatible with + MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShare] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.FileShare, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.FileShare].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.FileShare]( + self._client, raw_result, 
get_long_running_output, polling_method # type: ignore + ) + + def _delete_initial(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_file_shares_delete_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else 
response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> LROPoller[None]: + """Delete a FileShare. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if 
cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_subscription(self, **kwargs: Any) -> ItemPaged["_models.FileShare"]: + """List FileShare resources by subscription ID. + + :return: An iterator like instance of FileShare + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.FileShare]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_shares_list_by_subscription_request( + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params 
+ ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileShare], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @distributed_trace + def list_by_parent(self, resource_group_name: str, **kwargs: Any) -> ItemPaged["_models.FileShare"]: + """List FileShare resources by resource group. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :return: An iterator like instance of FileShare + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.fileshares.models.FileShare] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.FileShare]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_shares_list_by_parent_request( + resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + 
List[_models.FileShare], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + @overload + def check_name_availability( + self, + location: str, + body: _models.CheckNameAvailabilityRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CheckNameAvailabilityResponse: + """Implements local CheckNameAvailability operations. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The CheckAvailability request. Required. + :type body: ~azure.mgmt.fileshares.models.CheckNameAvailabilityRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResponse. The CheckNameAvailabilityResponse is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.CheckNameAvailabilityResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def check_name_availability( + self, location: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.CheckNameAvailabilityResponse: + """Implements local CheckNameAvailability operations. 
+ + :param location: The name of the Azure region. Required. + :type location: str + :param body: The CheckAvailability request. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResponse. The CheckNameAvailabilityResponse is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.CheckNameAvailabilityResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def check_name_availability( + self, location: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.CheckNameAvailabilityResponse: + """Implements local CheckNameAvailability operations. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The CheckAvailability request. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: CheckNameAvailabilityResponse. The CheckNameAvailabilityResponse is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.CheckNameAvailabilityResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def check_name_availability( + self, location: str, body: Union[_models.CheckNameAvailabilityRequest, JSON, IO[bytes]], **kwargs: Any + ) -> _models.CheckNameAvailabilityResponse: + """Implements local CheckNameAvailability operations. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The CheckAvailability request. Is one of the following types: + CheckNameAvailabilityRequest, JSON, IO[bytes] Required. + :type body: ~azure.mgmt.fileshares.models.CheckNameAvailabilityRequest or JSON or IO[bytes] + :return: CheckNameAvailabilityResponse. 
The CheckNameAvailabilityResponse is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.CheckNameAvailabilityResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CheckNameAvailabilityResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_shares_check_name_availability_request( + location=location, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( 
+ _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.CheckNameAvailabilityResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class FileShareSnapshotsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.FileSharesMgmtClient`'s + :attr:`file_share_snapshots` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get_file_share_snapshot( + self, resource_group_name: str, resource_name: str, name: str, **kwargs: Any + ) -> _models.FileShareSnapshot: + """Get a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :return: FileShareSnapshot. 
The FileShareSnapshot is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareSnapshot + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShareSnapshot] = kwargs.pop("cls", None) + + _request = build_file_share_snapshots_get_file_share_snapshot_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareSnapshot, response.json()) + + if cls: + return cls(pipeline_response, 
deserialized, {}) # type: ignore + + return deserialized # type: ignore + + def _create_or_update_file_share_snapshot_initial( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: Union[_models.FileShareSnapshot, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_share_snapshots_create_or_update_file_share_snapshot_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + try: + response.read() # Load the body in memory and close the socket + 
except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create_or_update_file_share_snapshot( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: _models.FileShareSnapshot, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Create a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param resource: Resource create parameters. Required. + :type resource: ~azure.mgmt.fileshares.models.FileShareSnapshot + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShareSnapshot. 
The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update_file_share_snapshot( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Create a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param resource: Resource create parameters. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShareSnapshot. The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update_file_share_snapshot( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Create a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param resource: Resource create parameters. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShareSnapshot. The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update_file_share_snapshot( # pylint: disable=name-too-long + self, + resource_group_name: str, + resource_name: str, + name: str, + resource: Union[_models.FileShareSnapshot, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Create a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param resource: Resource create parameters. Is one of the following types: FileShareSnapshot, + JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.fileshares.models.FileShareSnapshot or JSON or IO[bytes] + :return: An instance of LROPoller that returns FileShareSnapshot. 
The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShareSnapshot] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_or_update_file_share_snapshot_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response_headers = {} + response = pipeline_response.http_response + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = _deserialize(_models.FileShareSnapshot, response.json()) + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, 
**kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.FileShareSnapshot].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.FileShareSnapshot]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _update_file_share_snapshot_initial( + self, + resource_group_name: str, + resource_name: str, + name: str, + properties: Union[_models.FileShareSnapshotUpdate, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(properties, (IOBase, bytes)): + _content = properties + else: + _content = json.dumps(properties, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_file_share_snapshots_update_file_share_snapshot_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, 
**path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_update_file_share_snapshot( + self, + resource_group_name: str, + resource_name: str, + name: str, + properties: _models.FileShareSnapshotUpdate, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Update a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. 
+ :type name: str + :param properties: The resource properties to be updated. Required. + :type properties: ~azure.mgmt.fileshares.models.FileShareSnapshotUpdate + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShareSnapshot. The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_file_share_snapshot( + self, + resource_group_name: str, + resource_name: str, + name: str, + properties: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Update a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param properties: The resource properties to be updated. Required. + :type properties: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShareSnapshot. 
The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update_file_share_snapshot( + self, + resource_group_name: str, + resource_name: str, + name: str, + properties: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Update a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param properties: The resource properties to be updated. Required. + :type properties: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns FileShareSnapshot. The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_update_file_share_snapshot( + self, + resource_group_name: str, + resource_name: str, + name: str, + properties: Union[_models.FileShareSnapshotUpdate, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.FileShareSnapshot]: + """Update a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. 
+ :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. + :type name: str + :param properties: The resource properties to be updated. Is one of the following types: + FileShareSnapshotUpdate, JSON, IO[bytes] Required. + :type properties: ~azure.mgmt.fileshares.models.FileShareSnapshotUpdate or JSON or IO[bytes] + :return: An instance of LROPoller that returns FileShareSnapshot. The FileShareSnapshot is + compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShareSnapshot] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._update_file_share_snapshot_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + properties=properties, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.FileShareSnapshot, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, 
path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.FileShareSnapshot].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.FileShareSnapshot]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + def _delete_file_share_snapshot_initial( + self, resource_group_name: str, resource_name: str, name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_file_share_snapshots_delete_file_share_snapshot_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202, 204]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass 
+ map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def begin_delete_file_share_snapshot( + self, resource_group_name: str, resource_name: str, name: str, **kwargs: Any + ) -> LROPoller[None]: + """Delete a FileShareSnapshot. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param name: The name of the FileShareSnapshot. Required. 
+ :type name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_file_share_snapshot_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + def list_by_file_share( + self, resource_group_name: str, resource_name: str, **kwargs: Any + ) -> ItemPaged["_models.FileShareSnapshot"]: + """List FileShareSnapshot by FileShare. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An iterator like instance of FileShareSnapshot + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.fileshares.models.FileShareSnapshot] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.FileShareSnapshot]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_file_share_snapshots_list_by_file_share_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( 
+ "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.FileShareSnapshot], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.FileSharesMgmtClient`'s + :attr:`operations` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def list(self, **kwargs: Any) -> ItemPaged["_models.Operation"]: + """List the operations for the provider. + + :return: An iterator like instance of Operation + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.fileshares.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.Operation]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_operations_list_request( + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", 
urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.Operation], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class InformationalOperationsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.FileSharesMgmtClient`'s + :attr:`informational_operations` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + def get_usage_data(self, location: str, **kwargs: Any) -> _models.FileShareUsageDataResponse: + """Get file shares usage data. + + :param location: The name of the Azure region. Required. + :type location: str + :return: FileShareUsageDataResponse. The FileShareUsageDataResponse is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareUsageDataResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShareUsageDataResponse] = kwargs.pop("cls", None) + + _request = build_informational_operations_get_usage_data_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) 
+ + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareUsageDataResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + def get_limits(self, location: str, **kwargs: Any) -> _models.FileShareLimitsResponse: + """Get file shares limits. + + :param location: The name of the Azure region. Required. + :type location: str + :return: FileShareLimitsResponse. 
The FileShareLimitsResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareLimitsResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.FileShareLimitsResponse] = kwargs.pop("cls", None) + + _request = build_informational_operations_get_limits_request( + location=location, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareLimitsResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # 
type: ignore + + @overload + def get_provisioning_recommendation( + self, + location: str, + body: _models.FileShareProvisioningRecommendationRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Required. + :type body: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShareProvisioningRecommendationResponse. The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_provisioning_recommendation( + self, location: str, body: JSON, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Required. + :type body: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShareProvisioningRecommendationResponse. 
The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_provisioning_recommendation( + self, location: str, body: IO[bytes], *, content_type: str = "application/json", **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Required. + :type body: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: FileShareProvisioningRecommendationResponse. The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_provisioning_recommendation( + self, + location: str, + body: Union[_models.FileShareProvisioningRecommendationRequest, JSON, IO[bytes]], + **kwargs: Any + ) -> _models.FileShareProvisioningRecommendationResponse: + """Get file shares provisioning parameters recommendation. + + :param location: The name of the Azure region. Required. + :type location: str + :param body: The request body. Is one of the following types: + FileShareProvisioningRecommendationRequest, JSON, IO[bytes] Required. + :type body: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationRequest or JSON or + IO[bytes] + :return: FileShareProvisioningRecommendationResponse. 
The + FileShareProvisioningRecommendationResponse is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.FileShareProvisioningRecommendationResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FileShareProvisioningRecommendationResponse] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _content = json.dumps(body, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_informational_operations_get_provisioning_recommendation_request( + location=location, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.FileShareProvisioningRecommendationResponse, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.FileSharesMgmtClient`'s + :attr:`private_endpoint_connections` attribute. + """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def get( + self, resource_group_name: str, resource_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Gets the specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :return: PrivateEndpointConnection. The PrivateEndpointConnection is compatible with + MutableMapping + :rtype: ~azure.mgmt.fileshares.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_get_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): 
+ pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateEndpointConnection, response.json()) + + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def _create_initial( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _content = None + if isinstance(resource, (IOBase, bytes)): + _content = resource + else: + _content = json.dumps(resource, cls=SdkJSONEncoder, exclude_readonly=True) # type: ignore + + _request = build_private_endpoint_connections_create_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + 
private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + content_type=content_type, + api_version=self._config.api_version, + content=_content, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @overload + def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> 
LROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Required. + :type resource: ~azure.mgmt.fileshares.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns PrivateEndpointConnection. The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: JSON, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. 
+ :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Required. + :type resource: JSON + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns PrivateEndpointConnection. The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: IO[bytes], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Required. + :type resource: IO[bytes] + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :return: An instance of LROPoller that returns PrivateEndpointConnection. 
The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + "content_type", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def begin_create( + self, + resource_group_name: str, + resource_name: str, + private_endpoint_connection_name: str, + resource: Union[_models.PrivateEndpointConnection, JSON, IO[bytes]], + **kwargs: Any + ) -> LROPoller[_models.PrivateEndpointConnection]: + """Update the state of specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. + :type private_endpoint_connection_name: str + :param resource: The private endpoint connection properties. Is one of the following types: + PrivateEndpointConnection, JSON, IO[bytes] Required. + :type resource: ~azure.mgmt.fileshares.models.PrivateEndpointConnection or JSON or IO[bytes] + :return: An instance of LROPoller that returns PrivateEndpointConnection. 
The + PrivateEndpointConnection is compatible with MutableMapping + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = kwargs.pop("params", {}) or {} + + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._create_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + resource=resource, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): + response = pipeline_response.http_response + deserialized = _deserialize(_models.PrivateEndpointConnection, response.json()) + if cls: + return cls(pipeline_response, deserialized, {}) # type: ignore + return deserialized + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[_models.PrivateEndpointConnection].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + 
client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[_models.PrivateEndpointConnection]( + self._client, raw_result, get_long_running_output, polling_method # type: ignore + ) + + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def _delete_initial( + self, resource_group_name: str, resource_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> Iterator[bytes]: + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[Iterator[bytes]] = kwargs.pop("cls", None) + + _request = build_private_endpoint_connections_delete_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = True + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200, 202, 204]: + try: + response.read() # Load the body in memory and close the socket + except 
(StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + if response.status_code == 202: + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_endpoint_connection_name", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def begin_delete( + self, resource_group_name: str, resource_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> LROPoller[None]: + """Deletes the specified private endpoint connection associated with the file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_endpoint_connection_name: The name of the private endpoint connection associated + with the Azure resource. Required. 
+ :type private_endpoint_connection_name: str + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._delete_initial( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_endpoint_connection_name=private_endpoint_connection_name, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + raw_result.http_response.read() # type: ignore + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) # type: ignore + + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller[None].from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller[None](self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": ["api_version", "subscription_id", 
"resource_group_name", "resource_name", "accept"] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def list_by_file_share( + self, resource_group_name: str, resource_name: str, **kwargs: Any + ) -> ItemPaged["_models.PrivateEndpointConnection"]: + """Get a PrivateEndpointConnection List. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An iterator like instance of PrivateEndpointConnection + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.fileshares.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_private_endpoint_connections_list_by_file_share_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in 
value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.PrivateEndpointConnection], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) + + +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.fileshares.FileSharesMgmtClient`'s + :attr:`private_link_resources` attribute. 
+ """ + + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client: PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") + self._config: FileSharesMgmtClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize: Serializer = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize: Deserializer = input_args.pop(0) if input_args else kwargs.pop("deserializer") + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": [ + "api_version", + "subscription_id", + "resource_group_name", + "resource_name", + "private_link_resource_name", + "accept", + ] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def get( + self, resource_group_name: str, resource_name: str, private_link_resource_name: str, **kwargs: Any + ) -> _models.PrivateLinkResource: + """Gets the private link resources that need to be created for a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :param private_link_resource_name: The name of the private link resource. Required. + :type private_link_resource_name: str + :return: PrivateLinkResource. 
The PrivateLinkResource is compatible with MutableMapping + :rtype: ~azure.mgmt.fileshares.models.PrivateLinkResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[_models.PrivateLinkResource] = kwargs.pop("cls", None) + + _request = build_private_link_resources_get_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + private_link_resource_name=private_link_resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url("self._config.base_url", self._config.base_url, "str", skip_quote=True), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + _decompress = kwargs.pop("decompress", True) + _stream = kwargs.pop("stream", False) + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [200]: + if _stream: + try: + response.read() # Load the body in memory and close the socket + except (StreamConsumedError, StreamClosedError): + pass + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + if _stream: + deserialized = response.iter_bytes() if _decompress else response.iter_raw() + else: + deserialized = _deserialize(_models.PrivateLinkResource, response.json()) + + if cls: + 
return cls(pipeline_response, deserialized, {}) # type: ignore + + return deserialized # type: ignore + + @distributed_trace + @api_version_validation( + method_added_on="2025-09-01-preview", + params_added_on={ + "2025-09-01-preview": ["api_version", "subscription_id", "resource_group_name", "resource_name", "accept"] + }, + api_versions_list=["2025-09-01-preview", "2026-06-01"], + ) + def list( + self, resource_group_name: str, resource_name: str, **kwargs: Any + ) -> ItemPaged["_models.PrivateLinkResource"]: + """Gets the private link resources that need to be created for a file share. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param resource_name: The resource name of the file share, as seen by the administrator through + Azure Resource Manager. Required. + :type resource_name: str + :return: An iterator like instance of PrivateLinkResource + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.fileshares.models.PrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = kwargs.pop("headers", {}) or {} + _params = kwargs.pop("params", {}) or {} + + cls: ClsType[List[_models.PrivateLinkResource]] = kwargs.pop("cls", None) + + error_map: MutableMapping = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + def prepare_request(next_link=None): + if not next_link: + + _request = build_private_link_resources_list_request( + resource_group_name=resource_group_name, + resource_name=resource_name, + subscription_id=self._config.subscription_id, + api_version=self._config.api_version, + headers=_headers, + params=_params, + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = 
self._client.format_url(_request.url, **path_format_arguments) + + else: + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + _request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params + ) + path_format_arguments = { + "endpoint": self._serialize.url( + "self._config.base_url", self._config.base_url, "str", skip_quote=True + ), + } + _request.url = self._client.format_url(_request.url, **path_format_arguments) + + return _request + + def extract_data(pipeline_response): + deserialized = pipeline_response.http_response.json() + list_of_elem = _deserialize( + List[_models.PrivateLinkResource], + deserialized.get("value", []), + ) + if cls: + list_of_elem = cls(list_of_elem) # type: ignore + return deserialized.get("nextLink") or None, iter(list_of_elem) + + def get_next(next_link=None): + _request = prepare_request(next_link) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + _request, stream=_stream, **kwargs + ) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = _failsafe_deserialize( + _models.ErrorResponse, + response, + ) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged(get_next, extract_data) diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_patch.py b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_patch.py new file mode 100644 index 
000000000000..ea765788358a --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/operations/_patch.py @@ -0,0 +1,20 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" + +__all__: list[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/py.typed b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/py.typed new file mode 100644 index 000000000000..e5aff4f83af8 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/azure/mgmt/fileshares/py.typed @@ -0,0 +1 @@ +# Marker file for PEP 561. 
\ No newline at end of file diff --git a/sdk/fileshares/azure-mgmt-fileshares/dev_requirements.txt b/sdk/fileshares/azure-mgmt-fileshares/dev_requirements.txt new file mode 100644 index 000000000000..ece056fe0984 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/dev_requirements.txt @@ -0,0 +1,5 @@ +-e ../../../eng/tools/azure-sdk-tools +../../core/azure-core +../../identity/azure-identity +../../core/azure-mgmt-core +aiohttp \ No newline at end of file diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_maximum_set_gen.py new file mode 100644 index 000000000000..301ff0390e20 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_get_limits_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.informational_operations.get_limits( + location="westus", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShare_GetLimits_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_minimum_set_gen.py new file mode 100644 index 000000000000..96cc3f0fe2d6 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_limits_minimum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_get_limits_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.informational_operations.get_limits( + location="westus", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShare_GetLimits_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_maximum_set_gen.py new file mode 100644 index 000000000000..3910dcc08e83 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_get_provisioning_recommendation_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.informational_operations.get_provisioning_recommendation( + location="westus", + body={"properties": {"provisionedStorageGiB": 7}}, + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShare_GetProvisioningRecommendation_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_minimum_set_gen.py new file mode 100644 index 000000000000..035eb5992f45 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_provisioning_recommendation_minimum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_get_provisioning_recommendation_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.informational_operations.get_provisioning_recommendation( + location="westus", + body={"properties": {"provisionedStorageGiB": 7}}, + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShare_GetProvisioningRecommendation_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_maximum_set_gen.py new file mode 100644 index 000000000000..0699daa77bb8 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_get_usage_data_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.informational_operations.get_usage_data( + location="westus", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShare_GetUsageData_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_minimum_set_gen.py new file mode 100644 index 000000000000..64e1118e576e --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_get_usage_data_minimum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_get_usage_data_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.informational_operations.get_usage_data( + location="westus", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShare_GetUsageData_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_create_or_update_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..b579812dbea8 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_create_or_update_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_snapshot_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_share_snapshots.begin_create_or_update_file_share_snapshot( + resource_group_name="rgfileshares", + resource_name="fileshare", + name="testfilesharesnapshot", + resource={"properties": {"initiatorId": "backup-vault-001", "metadata": {"key9372": "jtc"}}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-06-01/FileShareSnapshot_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_delete_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_delete_maximum_set_gen.py new file mode 100644 index 000000000000..09bdb5b7785b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_delete_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_snapshot_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.file_share_snapshots.begin_delete_file_share_snapshot( + resource_group_name="rgfileshares", + resource_name="fileshare", + name="testfilesharesnapshot", + ).result() + + +# x-ms-original-file: 2026-06-01/FileShareSnapshot_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_get_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_get_maximum_set_gen.py new file mode 100644 index 000000000000..7cd9ce8d0a2a --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_get_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_snapshot_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_share_snapshots.get_file_share_snapshot( + resource_group_name="rgfileshares", + resource_name="fileshare", + name="testfilesharesnapshot", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShareSnapshot_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_maximum_set_gen.py new file mode 100644 index 000000000000..adc73bab4df6 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_maximum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_snapshot_list_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_share_snapshots.list_by_file_share( + resource_group_name="rgfileshares", + resource_name="fileshare", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/FileShareSnapshot_List_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_minimum_set_gen.py new file mode 100644 index 000000000000..86c3a100b89d --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_list_minimum_set_gen.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_snapshot_list_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_share_snapshots.list_by_file_share( + resource_group_name="rgfileshares", + resource_name="testfileshare", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/FileShareSnapshot_List_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_update_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_update_maximum_set_gen.py new file mode 100644 index 000000000000..b28bb88a0477 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_share_snapshot_update_maximum_set_gen.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_share_snapshot_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_share_snapshots.begin_update_file_share_snapshot( + resource_group_name="rgfileshares", + resource_name="fileshare", + name="testfilesharesnapshot", + properties={"properties": {"metadata": {"key491": "dalhvhxqhjszelfuueetvxmgkbukwa"}}}, + ).result() + print(response) + + +# x-ms-original-file: 2026-06-01/FileShareSnapshot_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_maximum_set_gen.py new file mode 100644 index 000000000000..dec780be6c21 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_check_name_availability_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.check_name_availability( + location="westus", + body={"name": "fvykqbgmd", "type": "Microsoft.FileShares/fileShares"}, + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShares_CheckNameAvailability_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_minimum_set_gen.py new file mode 100644 index 000000000000..1a44d8326ff8 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_check_name_availability_minimum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_check_name_availability_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.check_name_availability( + location="westus", + body={"name": "str", "type": "str"}, + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShares_CheckNameAvailability_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_create_or_update_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_create_or_update_maximum_set_gen.py new file mode 100644 index 000000000000..43aaef74e5d0 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_create_or_update_maximum_set_gen.py @@ -0,0 +1,63 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_create_or_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.begin_create_or_update( + resource_group_name="rgfileshares", + resource_name="fileshare", + resource={ + "location": "westus", + "properties": { + "mediaTier": "SSD", + "mountName": "fileshare", + "nfsProtocolProperties": {"encryptionInTransitRequired": "Enabled", "rootSquash": "NoRootSquash"}, + "protocol": "NFS", + "provisionedIOPerSec": 5, + "provisionedStorageGiB": 8, + "provisionedThroughputMiBPerSec": 22, + "publicAccessProperties": { + "allowedSubnets": [ + "/subscriptions/9760acf5-4638-11e7-9bdb-020073ca7778/resourceGroups/myRP/providers/Microsoft.Network/virtualNetworks/testvnet3/subnets/testsubnet3" + ] + }, + "publicNetworkAccess": "Enabled", + "redundancy": "Local", + }, + "tags": {"key9647": "xwokdvyoae"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-06-01/FileShares_CreateOrUpdate_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git 
a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_delete_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_delete_maximum_set_gen.py new file mode 100644 index 000000000000..ed9d811150a6 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_delete_maximum_set_gen.py @@ -0,0 +1,41 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_delete_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.file_shares.begin_delete( + resource_group_name="rgfileshares", + resource_name="fileshare", + ).result() + + +# x-ms-original-file: 2026-06-01/FileShares_Delete_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_get_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_get_maximum_set_gen.py new file mode 100644 index 000000000000..a07f66985009 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_get_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_get_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.get( + resource_group_name="rgfileshares", + resource_name="fileshare", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/FileShares_Get_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_maximum_set_gen.py new file mode 100644 index 000000000000..bc0d31669296 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_maximum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_list_by_parent_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.list_by_parent( + resource_group_name="rgfileshares", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/FileShares_ListByParent_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_minimum_set_gen.py new file mode 100644 index 000000000000..ac342cec4d66 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_parent_minimum_set_gen.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_list_by_parent_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.list_by_parent( + resource_group_name="rgfileshares", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/FileShares_ListByParent_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_maximum_set_gen.py new file mode 100644 index 000000000000..6a6e645f74b6 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_list_by_subscription_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/FileShares_ListBySubscription_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_minimum_set_gen.py new file mode 100644 index 000000000000..320e98973acd --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_list_by_subscription_minimum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_list_by_subscription_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.list_by_subscription() + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/FileShares_ListBySubscription_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_update_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_update_maximum_set_gen.py new file mode 100644 index 000000000000..9ad82565d8cc --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/file_shares_update_maximum_set_gen.py @@ -0,0 +1,58 @@ +# pylint: disable=line-too-long,useless-suppression +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python file_shares_update_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. 
For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.file_shares.begin_update( + resource_group_name="rgfileshares", + resource_name="fileshare", + properties={ + "properties": { + "nfsProtocolProperties": {"encryptionInTransitRequired": "Enabled", "rootSquash": "NoRootSquash"}, + "provisionedIOPerSec": 1, + "provisionedStorageGiB": 7, + "provisionedThroughputMiBPerSec": 29, + "publicAccessProperties": { + "allowedSubnets": [ + "/subscriptions/9760acf5-4638-11e7-9bdb-020073ca7778/resourceGroups/myRP/providers/Microsoft.Network/virtualNetworks/testvnet3/subnets/testsubnet3" + ] + }, + "publicNetworkAccess": "Enabled", + }, + "tags": {"key173": "uyf"}, + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-06-01/FileShares_Update_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_maximum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_maximum_set_gen.py new file mode 100644 index 000000000000..c0bc604b64ec --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_maximum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python operations_list_maximum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.operations.list() + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/Operations_List_MaximumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_minimum_set_gen.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_minimum_set_gen.py new file mode 100644 index 000000000000..ebb7aa0b274f --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/operations_list_minimum_set_gen.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python operations_list_minimum_set_gen.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.operations.list() + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/Operations_List_MinimumSet_Gen.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_create.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_create.py new file mode 100644 index 000000000000..22f5ff803cab --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_create.py @@ -0,0 +1,48 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python private_endpoint_connections_create.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.private_endpoint_connections.begin_create( + resource_group_name="rgfileshares", + resource_name="fileshare", + private_endpoint_connection_name="privateEndpointConnection1", + resource={ + "properties": { + "privateLinkServiceConnectionState": {"description": "Approved by admin", "status": "Approved"} + } + }, + ).result() + print(response) + + +# x-ms-original-file: 2026-06-01/PrivateEndpointConnections_Create.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_delete.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_delete.py new file mode 100644 index 000000000000..9aa9e3aa07c0 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_delete.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python private_endpoint_connections_delete.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + client.private_endpoint_connections.begin_delete( + resource_group_name="rgfileshares", + resource_name="fileshare", + private_endpoint_connection_name="privateEndpointConnection1", + ).result() + + +# x-ms-original-file: 2026-06-01/PrivateEndpointConnections_Delete.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_get.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_get.py new file mode 100644 index 000000000000..c41c32926dfe --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python private_endpoint_connections_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.private_endpoint_connections.get( + resource_group_name="rgfileshares", + resource_name="fileshare", + private_endpoint_connection_name="privateEndpointConnection1", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/PrivateEndpointConnections_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_list_by_file_share.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_list_by_file_share.py new file mode 100644 index 000000000000..e370a4e19b75 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_endpoint_connections_list_by_file_share.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. 
+# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python private_endpoint_connections_list_by_file_share.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.private_endpoint_connections.list_by_file_share( + resource_group_name="rgfileshares", + resource_name="fileshare", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/PrivateEndpointConnections_ListByFileShare.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_get.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_get.py new file mode 100644 index 000000000000..514b8642d209 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_get.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python private_link_resources_get.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.private_link_resources.get( + resource_group_name="res4303", + resource_name="testfileshare01", + private_link_resource_name="fileshare", + ) + print(response) + + +# x-ms-original-file: 2026-06-01/PrivateLinkResources_Get.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_list_by_file_share.py b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_list_by_file_share.py new file mode 100644 index 000000000000..46fb4e3c96a6 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_samples/private_link_resources_list_by_file_share.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- + +from azure.identity import DefaultAzureCredential + +from azure.mgmt.fileshares import FileSharesMgmtClient + +""" +# PREREQUISITES + pip install azure-identity + pip install azure-mgmt-fileshares +# USAGE + python private_link_resources_list_by_file_share.py + + Before run the sample, please set the values of the client ID, tenant ID and client secret + of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID, + AZURE_CLIENT_SECRET. For more info about how to get the value, please see: + https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal +""" + + +def main(): + client = FileSharesMgmtClient( + credential=DefaultAzureCredential(), + subscription_id="SUBSCRIPTION_ID", + ) + + response = client.private_link_resources.list( + resource_group_name="res4303", + resource_name="testfileshare01", + ) + for item in response: + print(item) + + +# x-ms-original-file: 2026-06-01/PrivateLinkResources_ListByFileShare.json +if __name__ == "__main__": + main() diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/conftest.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/conftest.py new file mode 100644 index 000000000000..d063c8fcb8cc --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/conftest.py @@ -0,0 +1,35 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, +) + +load_dotenv() + + +# For security, please avoid record sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + filesharesmgmt_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + filesharesmgmt_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + filesharesmgmt_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + filesharesmgmt_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=filesharesmgmt_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=filesharesmgmt_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=filesharesmgmt_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=filesharesmgmt_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations.py new file mode 100644 index 000000000000..f2f6b1e76851 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations.py @@ -0,0 +1,93 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtFileShareSnapshotsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_share_snapshots_get_file_share_snapshot(self, resource_group): + response = self.client.file_share_snapshots.get_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_share_snapshots_begin_create_or_update_file_share_snapshot(self, resource_group): + response = self.client.file_share_snapshots.begin_create_or_update_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + resource={ + "id": "str", + "name": "str", + "properties": {"initiatorId": "str", "metadata": {"str": "str"}, "snapshotTime": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_share_snapshots_begin_update_file_share_snapshot(self, resource_group): + response = self.client.file_share_snapshots.begin_update_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + properties={"properties": {"metadata": {"str": "str"}}}, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_share_snapshots_begin_delete_file_share_snapshot(self, resource_group): + response = self.client.file_share_snapshots.begin_delete_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_share_snapshots_list_by_file_share(self, resource_group): + response = self.client.file_share_snapshots.list_by_file_share( + resource_group_name=resource_group.name, + resource_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations_async.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations_async.py new file mode 100644 index 000000000000..d9fd356f6666 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_share_snapshots_operations_async.py @@ -0,0 +1,100 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares.aio import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtFileShareSnapshotsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_share_snapshots_get_file_share_snapshot(self, resource_group): + response = await self.client.file_share_snapshots.get_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_share_snapshots_begin_create_or_update_file_share_snapshot(self, resource_group): + response = await ( + await self.client.file_share_snapshots.begin_create_or_update_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + resource={ + "id": "str", + "name": "str", + "properties": {"initiatorId": "str", "metadata": {"str": "str"}, "snapshotTime": "str"}, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_share_snapshots_begin_update_file_share_snapshot(self, resource_group): + response = await ( + await self.client.file_share_snapshots.begin_update_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + properties={"properties": {"metadata": {"str": "str"}}}, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_share_snapshots_begin_delete_file_share_snapshot(self, resource_group): + response = await ( + await self.client.file_share_snapshots.begin_delete_file_share_snapshot( + resource_group_name=resource_group.name, + resource_name="str", + name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_share_snapshots_list_by_file_share(self, resource_group): + response = self.client.file_share_snapshots.list_by_file_share( + resource_group_name=resource_group.name, + resource_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations.py new file mode 100644 index 000000000000..97538c79ece7 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations.py @@ -0,0 +1,162 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtFileSharesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_get(self, resource_group): + response = self.client.file_shares.get( + resource_group_name=resource_group.name, + resource_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_begin_create_or_update(self, resource_group): + response = self.client.file_shares.begin_create_or_update( + resource_group_name=resource_group.name, + resource_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "hostName": "str", + "includedBurstIOPerSec": 0, + "maxBurstIOPerSecCredits": 0, + "mediaTier": "str", + "mountName": "str", + "nfsProtocolProperties": {"encryptionInTransitRequired": "str", "rootSquash": "str"}, + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "protocol": "str", + "provisionedIOPerSec": 0, + "provisionedIOPerSecNextAllowedDowngrade": "2020-02-20 00:00:00", + "provisionedStorageGiB": 0, + "provisionedStorageNextAllowedDowngrade": "2020-02-20 00:00:00", + "provisionedThroughputMiBPerSec": 0, + "provisionedThroughputNextAllowedDowngrade": "2020-02-20 00:00:00", + "provisioningState": "str", + "publicAccessProperties": {"allowedSubnets": ["str"]}, + "publicNetworkAccess": "str", + "redundancy": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_begin_update(self, resource_group): + response = self.client.file_shares.begin_update( + resource_group_name=resource_group.name, + resource_name="str", + properties={ + "properties": { + "nfsProtocolProperties": {"encryptionInTransitRequired": "str", "rootSquash": "str"}, + "provisionedIOPerSec": 0, + "provisionedStorageGiB": 0, + "provisionedThroughputMiBPerSec": 0, + "publicAccessProperties": {"allowedSubnets": ["str"]}, + "publicNetworkAccess": "str", + }, + "tags": {"str": "str"}, + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_begin_delete(self, resource_group): + response = self.client.file_shares.begin_delete( + resource_group_name=resource_group.name, + resource_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_list_by_subscription(self, resource_group): + response = self.client.file_shares.list_by_subscription() + result = [r for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_list_by_parent(self, resource_group): + response = self.client.file_shares.list_by_parent( + resource_group_name=resource_group.name, + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_file_shares_check_name_availability(self, resource_group): + response = self.client.file_shares.check_name_availability( + location="str", + body={"name": "str", "type": "str"}, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations_async.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations_async.py new file mode 100644 index 000000000000..edd9e3b94508 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_file_shares_operations_async.py @@ -0,0 +1,169 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares.aio import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtFileSharesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_get(self, resource_group): + response = await self.client.file_shares.get( + resource_group_name=resource_group.name, + resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_begin_create_or_update(self, resource_group): + response = await ( + await self.client.file_shares.begin_create_or_update( + resource_group_name=resource_group.name, + resource_name="str", + resource={ + "location": "str", + "id": "str", + "name": "str", + "properties": { + "hostName": "str", + "includedBurstIOPerSec": 0, + "maxBurstIOPerSecCredits": 0, + "mediaTier": "str", + "mountName": "str", + "nfsProtocolProperties": {"encryptionInTransitRequired": "str", "rootSquash": "str"}, + "privateEndpointConnections": [ + { + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + 
"lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + } + ], + "protocol": "str", + "provisionedIOPerSec": 0, + "provisionedIOPerSecNextAllowedDowngrade": "2020-02-20 00:00:00", + "provisionedStorageGiB": 0, + "provisionedStorageNextAllowedDowngrade": "2020-02-20 00:00:00", + "provisionedThroughputMiBPerSec": 0, + "provisionedThroughputNextAllowedDowngrade": "2020-02-20 00:00:00", + "provisioningState": "str", + "publicAccessProperties": {"allowedSubnets": ["str"]}, + "publicNetworkAccess": "str", + "redundancy": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "tags": {"str": "str"}, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_begin_update(self, resource_group): + response = await ( + await self.client.file_shares.begin_update( + resource_group_name=resource_group.name, + resource_name="str", + properties={ + "properties": { + "nfsProtocolProperties": {"encryptionInTransitRequired": "str", "rootSquash": "str"}, + "provisionedIOPerSec": 0, + "provisionedStorageGiB": 0, + "provisionedThroughputMiBPerSec": 0, + "publicAccessProperties": {"allowedSubnets": ["str"]}, + "publicNetworkAccess": "str", + }, + "tags": {"str": "str"}, + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_begin_delete(self, resource_group): + response = await ( + await self.client.file_shares.begin_delete( + resource_group_name=resource_group.name, + resource_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_list_by_subscription(self, resource_group): + response = self.client.file_shares.list_by_subscription() + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_list_by_parent(self, resource_group): + response = self.client.file_shares.list_by_parent( + resource_group_name=resource_group.name, + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_file_shares_check_name_availability(self, resource_group): + response = await self.client.file_shares.check_name_availability( + location="str", + body={"name": "str", "type": "str"}, + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations.py new file mode 100644 index 000000000000..a1fb40c5d4ce --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations.py @@ -0,0 +1,50 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtInformationalOperationsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_informational_operations_get_usage_data(self, resource_group): + response = self.client.informational_operations.get_usage_data( + location="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_informational_operations_get_limits(self, resource_group): + response = self.client.informational_operations.get_limits( + location="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_informational_operations_get_provisioning_recommendation(self, resource_group): + response = self.client.informational_operations.get_provisioning_recommendation( + location="str", + body={"properties": {"provisionedStorageGiB": 0}}, + ) + + # please add some check logic here by yourself + # ... diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations_async.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations_async.py new file mode 100644 index 000000000000..14dad5ad6d4b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_informational_operations_operations_async.py @@ -0,0 +1,51 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares.aio import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtInformationalOperationsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_informational_operations_get_usage_data(self, resource_group): + response = await self.client.informational_operations.get_usage_data( + location="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_informational_operations_get_limits(self, resource_group): + response = await self.client.informational_operations.get_limits( + location="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_informational_operations_get_provisioning_recommendation(self, resource_group): + response = await self.client.informational_operations.get_provisioning_recommendation( + location="str", + body={"properties": {"provisionedStorageGiB": 0}}, + ) + + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations.py new file mode 100644 index 000000000000..4f07c79fbf89 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations.py @@ -0,0 +1,27 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_operations_list(self, resource_group): + response = self.client.operations.list() + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations_async.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations_async.py new file mode 100644 index 000000000000..00268a38cab0 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_operations_async.py @@ -0,0 +1,28 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares.aio import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_operations_list(self, resource_group): + response = self.client.operations.list() + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations.py new file mode 100644 index 000000000000..8eb06ca4f278 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations.py @@ -0,0 +1,89 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtPrivateEndpointConnectionsOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_endpoint_connections_get(self, resource_group): + response = self.client.private_endpoint_connections.get( + resource_group_name=resource_group.name, + resource_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_endpoint_connections_begin_create(self, resource_group): + response = self.client.private_endpoint_connections.begin_create( + resource_group_name=resource_group.name, + resource_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_endpoint_connections_begin_delete(self, resource_group): + response = self.client.private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + resource_name="str", + private_endpoint_connection_name="str", + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_endpoint_connections_list_by_file_share(self, resource_group): + response = self.client.private_endpoint_connections.list_by_file_share( + resource_group_name=resource_group.name, + resource_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations_async.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations_async.py new file mode 100644 index 000000000000..925d4caac371 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_endpoint_connections_operations_async.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares.aio import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtPrivateEndpointConnectionsOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_endpoint_connections_get(self, resource_group): + response = await self.client.private_endpoint_connections.get( + resource_group_name=resource_group.name, + resource_name="str", + private_endpoint_connection_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_endpoint_connections_begin_create(self, resource_group): + response = await ( + await self.client.private_endpoint_connections.begin_create( + resource_group_name=resource_group.name, + resource_name="str", + private_endpoint_connection_name="str", + resource={ + "id": "str", + "name": "str", + "properties": { + "privateLinkServiceConnectionState": { + "actionsRequired": "str", + "description": "str", + "status": "str", + }, + "groupIds": ["str"], + "privateEndpoint": {"id": "str"}, + "provisioningState": "str", + }, + "systemData": { + "createdAt": "2020-02-20 00:00:00", + "createdBy": "str", + "createdByType": "str", + "lastModifiedAt": "2020-02-20 00:00:00", + "lastModifiedBy": "str", + "lastModifiedByType": "str", + }, + "type": "str", + }, + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_endpoint_connections_begin_delete(self, resource_group): + response = await ( + await self.client.private_endpoint_connections.begin_delete( + resource_group_name=resource_group.name, + resource_name="str", + private_endpoint_connection_name="str", + ) + ).result() # call '.result()' to poll until service return final result + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_endpoint_connections_list_by_file_share(self, resource_group): + response = self.client.private_endpoint_connections.list_by_file_share( + resource_group_name=resource_group.name, + resource_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations.py new file mode 100644 index 000000000000..810f90996052 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations.py @@ -0,0 +1,42 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, recorded_by_proxy + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtPrivateLinkResourcesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_link_resources_get(self, resource_group): + response = self.client.private_link_resources.get( + resource_group_name=resource_group.name, + resource_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... 
+ + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy + def test_private_link_resources_list(self, resource_group): + response = self.client.private_link_resources.list( + resource_group_name=resource_group.name, + resource_name="str", + ) + result = [r for r in response] + # please add some check logic here by yourself + # ... diff --git a/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations_async.py b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations_async.py new file mode 100644 index 000000000000..6501b6c298c6 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/generated_tests/test_file_shares_mgmt_private_link_resources_operations_async.py @@ -0,0 +1,43 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) Python Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+# -------------------------------------------------------------------------- +import pytest +from azure.mgmt.fileshares.aio import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer +from devtools_testutils.aio import recorded_by_proxy_async + +AZURE_LOCATION = "eastus" + + +@pytest.mark.skip("you may need to update the auto-generated test case before run it") +class TestFileSharesMgmtPrivateLinkResourcesOperationsAsync(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client(FileSharesMgmtClient, is_async=True) + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_link_resources_get(self, resource_group): + response = await self.client.private_link_resources.get( + resource_group_name=resource_group.name, + resource_name="str", + private_link_resource_name="str", + ) + + # please add some check logic here by yourself + # ... + + @RandomNameResourceGroupPreparer(location=AZURE_LOCATION) + @recorded_by_proxy_async + async def test_private_link_resources_list(self, resource_group): + response = self.client.private_link_resources.list( + resource_group_name=resource_group.name, + resource_name="str", + ) + result = [r async for r in response] + # please add some check logic here by yourself + # ... 
diff --git a/sdk/fileshares/azure-mgmt-fileshares/pyproject.toml b/sdk/fileshares/azure-mgmt-fileshares/pyproject.toml new file mode 100644 index 000000000000..221e513e47d2 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/pyproject.toml @@ -0,0 +1,86 @@ +[build-system] +requires = [ + "setuptools>=77.0.3", + "wheel", +] +build-backend = "setuptools.build_meta" + +[project] +name = "azure-mgmt-fileshares" +authors = [ + { name = "Microsoft Corporation", email = "azpysdkhelp@microsoft.com" }, +] +description = "Microsoft Azure Fileshares Management Client Library for Python" +license = "MIT" +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", +] +requires-python = ">=3.9" +keywords = [ + "azure", + "azure sdk", +] +dependencies = [ + "isodate>=0.6.1", + "azure-mgmt-core>=1.6.0", + "typing-extensions>=4.6.0", +] +dynamic = [ + "version", + "readme", +] + +[project.urls] +repository = "https://github.com/Azure/azure-sdk-for-python" + +[tool.setuptools.dynamic.version] +attr = "azure.mgmt.fileshares._version.VERSION" + +[tool.setuptools.dynamic.readme] +file = [ + "README.md", + "CHANGELOG.md", +] +content-type = "text/markdown" + +[tool.setuptools.packages.find] +exclude = [ + "tests*", + "generated_tests*", + "samples*", + "generated_samples*", + "doc*", + "azure", + "azure.mgmt", +] + +[tool.setuptools.package-data] +pytyped = [ + "py.typed", +] + +[tool.azure-sdk-build] +breaking = false +pyright = false +mypy = false + +[packaging] +package_name = "azure-mgmt-fileshares" +package_nspkg = "azure-mgmt-nspkg" +package_pprint_name = "FileShares Management" +package_doc_id = "" +is_stable = false +is_arm = true 
+need_msrestazure = false +need_azuremgmtcore = true +sample_link = "" +exclude_folders = "" +title = "FileSharesMgmtClient" diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/_fs_test_helpers.py b/sdk/fileshares/azure-mgmt-fileshares/tests/_fs_test_helpers.py new file mode 100644 index 000000000000..3c78a9f99a2f --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/_fs_test_helpers.py @@ -0,0 +1,163 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Shared helpers for the azure-mgmt-fileshares hand-written test suite. + +Scenario coverage and parameter values are inspired by the PowerShell +``Az.FileShare`` test suite under +``azure-powershell/src/FileShare/FileShare.Autorest/test``. Implementation is +plain ``pytest`` against the Python SDK; no PowerShell idioms are ported. 
+""" + +from __future__ import annotations + +import os +import uuid +from typing import Any, Optional + +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError +from azure.mgmt.fileshares import FileSharesMgmtClient +from azure.mgmt.fileshares import models as fs_models + +# --- Configuration -------------------------------------------------------- + +ARM_ENDPOINT = os.environ.get("ARM_ENDPOINT", "https://eastus2euap.management.azure.com") +LOCATION = os.environ.get("FILESHARES_TEST_LOCATION", "eastus2euap") +RESOURCE_GROUP = os.environ.get("FILESHARES_TEST_RG", "mfstest-prod-eastus2euap-crud-2026-06-01-rg") + + +# --- Name generation ------------------------------------------------------ + + +def make_share_name(prefix: str = "fs-azsdk") -> str: + """Return a unique lowercase share name, ≤ 63 chars, valid for the RP.""" + return f"{prefix}-{uuid.uuid4().hex[:10]}" + + +def make_snapshot_name(prefix: str = "snap-azsdk") -> str: + return f"{prefix}-{uuid.uuid4().hex[:10]}" + + +def var_share(variables: dict, key: str, prefix: str = "fs-azsdk") -> str: + """Return a stable share name across record/playback via the proxy ``variables`` dict. + + On Live/Record, generates a fresh uuid-based name and stores it under ``key``. + On Playback, the proxy pre-populates ``variables`` from the recording so the + same name is returned — letting the URI match the recorded request. 
+ """ + return variables.setdefault(key, make_share_name(prefix)) + + +def var_snapshot(variables: dict, key: str, prefix: str = "snap-azsdk") -> str: + return variables.setdefault(key, make_snapshot_name(prefix)) + + +# --- Payload builders ----------------------------------------------------- + + +def build_share_payload( + *, + location: str = LOCATION, + media_tier: str = "SSD", + redundancy: str = "Local", + protocol: str = "NFS", + provisioned_storage_gi_b: int = 100, + provisioned_io_per_sec: int = 3300, + provisioned_throughput_mi_b_per_sec: int = 200, + mount_name: str = "theshare", + nfs_root_squash: Optional[str] = "RootSquash", + public_network_access: Optional[str] = None, + tags: Optional[dict] = None, +) -> fs_models.FileShare: + """Build a FileShare create/update payload exercising the value matrix from + the PowerShell test spec.""" + nfs_props = fs_models.NfsProtocolProperties(root_squash=nfs_root_squash) if nfs_root_squash is not None else None + properties = fs_models.FileShareProperties( + mount_name=mount_name, + media_tier=media_tier, + redundancy=redundancy, + protocol=protocol, + provisioned_storage_gi_b=provisioned_storage_gi_b, + provisioned_io_per_sec=provisioned_io_per_sec, + provisioned_throughput_mi_b_per_sec=provisioned_throughput_mi_b_per_sec, + nfs_protocol_properties=nfs_props, + public_network_access=public_network_access, + ) + return fs_models.FileShare( + location=location, + tags=tags or {"owner": "azsdk-test"}, + properties=properties, + ) + + +def build_share_update( + *, + tags: Optional[dict] = None, + provisioned_storage_gi_b: Optional[int] = None, + provisioned_io_per_sec: Optional[int] = None, + provisioned_throughput_mi_b_per_sec: Optional[int] = None, + public_network_access: Optional[str] = None, +) -> fs_models.FileShareUpdate: + """Build a ``FileShareUpdate`` payload for PATCH.""" + props_kwargs: dict[str, Any] = {} + if provisioned_storage_gi_b is not None: + props_kwargs["provisioned_storage_gi_b"] = 
provisioned_storage_gi_b + if provisioned_io_per_sec is not None: + props_kwargs["provisioned_io_per_sec"] = provisioned_io_per_sec + if provisioned_throughput_mi_b_per_sec is not None: + props_kwargs["provisioned_throughput_mi_b_per_sec"] = provisioned_throughput_mi_b_per_sec + if public_network_access is not None: + props_kwargs["public_network_access"] = public_network_access + properties = fs_models.FileShareUpdateProperties(**props_kwargs) if props_kwargs else None + return fs_models.FileShareUpdate(tags=tags, properties=properties) + + +def build_snapshot_payload(metadata: Optional[dict] = None) -> fs_models.FileShareSnapshot: + return fs_models.FileShareSnapshot( + properties=fs_models.FileShareSnapshotProperties(metadata=metadata or {}), + ) + + +# --- Cleanup helpers ------------------------------------------------------ + + +def safe_delete_share(client: FileSharesMgmtClient, resource_group_name: str, resource_name: str) -> None: + """Best-effort delete; swallow 404 / generic ARM errors so cleanup never fails a test.""" + try: + client.file_shares.begin_delete( + resource_group_name=resource_group_name, + resource_name=resource_name, + ).result() + except (ResourceNotFoundError, HttpResponseError): + pass + + +def safe_delete_snapshot( + client: FileSharesMgmtClient, + resource_group_name: str, + resource_name: str, + snapshot_name: str, +) -> None: + try: + client.file_share_snapshots.begin_delete_file_share_snapshot( + resource_group_name=resource_group_name, + resource_name=resource_name, + name=snapshot_name, + ).result() + except (ResourceNotFoundError, HttpResponseError): + pass + + +# --- Test base mixin ------------------------------------------------------ + + +def make_client(test_case) -> FileSharesMgmtClient: + """Construct a ``FileSharesMgmtClient`` against the canary ARM endpoint, mirroring + ``test_fileshares_crud.py``.""" + return test_case.create_mgmt_client( + FileSharesMgmtClient, + base_url=ARM_ENDPOINT, + 
credential_scopes=["https://management.azure.com/.default"], + ) diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/conftest.py b/sdk/fileshares/azure-mgmt-fileshares/tests/conftest.py new file mode 100644 index 000000000000..80d514ac7df2 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/conftest.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +import os +import pytest +from dotenv import load_dotenv +from devtools_testutils import ( + test_proxy, + add_general_regex_sanitizer, + add_body_key_sanitizer, + add_header_regex_sanitizer, + remove_batch_sanitizers, +) + +load_dotenv() + + +# For security, please avoid recording sensitive identity information in recordings +@pytest.fixture(scope="session", autouse=True) +def add_sanitizers(test_proxy): + fileshares_subscription_id = os.environ.get("AZURE_SUBSCRIPTION_ID", "00000000-0000-0000-0000-000000000000") + fileshares_tenant_id = os.environ.get("AZURE_TENANT_ID", "00000000-0000-0000-0000-000000000000") + fileshares_client_id = os.environ.get("AZURE_CLIENT_ID", "00000000-0000-0000-0000-000000000000") + fileshares_client_secret = os.environ.get("AZURE_CLIENT_SECRET", "00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=fileshares_subscription_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=fileshares_tenant_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=fileshares_client_id, value="00000000-0000-0000-0000-000000000000") + add_general_regex_sanitizer(regex=fileshares_client_secret, value="00000000-0000-0000-0000-000000000000") + + add_header_regex_sanitizer(key="Set-Cookie", value="[set-cookie;]") + 
add_header_regex_sanitizer(key="Cookie", value="cookie;") + add_body_key_sanitizer(json_path="$..access_token", value="access_token") + + # Preserve resource names and IDs in recorded response bodies — tests assert on them + # and use the variables API to keep names stable across record/playback. + # - AZSDK3493: $..name body key sanitizer + # - AZSDK3430: $..id body key sanitizer + remove_batch_sanitizers(["AZSDK3493", "AZSDK3430"]) diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_complex_scenarios.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_complex_scenarios.py new file mode 100644 index 000000000000..8dadeefbff9d --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_complex_scenarios.py @@ -0,0 +1,123 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------- +"""Complex scenario tests for ``Microsoft.FileShares``.""" + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + RESOURCE_GROUP, + build_share_payload, + build_share_update, + make_client, + safe_delete_share, + var_share, +) + + +class TestFileSharesComplexScenarios(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_bulk_create_list_update_delete(self, variables): + bulk_count = 3 + names = [var_share(variables, f"share_name_{i}", f"bulk-{i}") for i in range(bulk_count)] + try: + for i, name in enumerate(names): + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload( + provisioned_storage_gi_b=100 + (i * 50), + tags={"bulk": "true", "index": str(i)}, + ), + ).result() + assert created.name == name + assert created.properties.provisioning_state == "Succeeded" + + listed_names = {s.name for s in self.client.file_shares.list_by_parent(resource_group_name=RESOURCE_GROUP)} + for name in names: + assert name in listed_names + + for name in names: + updated = self.client.file_shares.begin_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + properties=build_share_update(tags={"bulk": "true", "stage": "updated"}), + ).result() + assert updated.tags.get("stage") == "updated" + finally: + for name in names: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_nfs_root_squash_variants(self, variables): + for squash in ("RootSquash", "NoRootSquash", "AllSquash"): + name = var_share(variables, f"share_name_{squash}", f"squash-{squash.lower()}") + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + 
resource_name=name, + resource=build_share_payload( + nfs_root_squash=squash, + tags={"squash": squash}, + ), + ).result() + assert created.properties.nfs_protocol_properties is not None + assert created.properties.nfs_protocol_properties.root_squash == squash + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_redundancy_variants(self, variables): + for redundancy in ("Local", "Zone"): + name = var_share(variables, f"share_name_{redundancy}", f"redund-{redundancy.lower()}") + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload( + redundancy=redundancy, + tags={"redundancy": redundancy}, + ), + ).result() + assert created.properties.redundancy == redundancy + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_scale_up_via_update(self, variables): + name = var_share(variables, "share_name", "scaleup") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload( + provisioned_storage_gi_b=100, + provisioned_io_per_sec=3300, + provisioned_throughput_mi_b_per_sec=200, + ), + ).result() + + updated = self.client.file_shares.begin_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + properties=build_share_update( + provisioned_storage_gi_b=200, + provisioned_io_per_sec=4000, + provisioned_throughput_mi_b_per_sec=300, + ), + ).result() + assert updated.properties.provisioning_state == "Succeeded" + assert updated.properties.provisioned_storage_gi_b == 200 + assert updated.properties.provisioned_io_per_sec == 4000 + assert updated.properties.provisioned_throughput_mi_b_per_sec == 300 + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables diff --git 
a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_crud.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_crud.py new file mode 100644 index 000000000000..bba54f4e0f8f --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_crud.py @@ -0,0 +1,122 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""End-to-end CRUD test for the Microsoft.FileShares RP. + +Creates a file share, gets it, updates a tag, lists by parent (resource group), +and finally deletes it. Targets the public East US region. +""" + +import os +import uuid + +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient +from azure.mgmt.fileshares import models as fs_models + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +ARM_ENDPOINT = os.environ.get("ARM_ENDPOINT", "https://management.azure.com") + +# Pre-existing resource group provisioned for CRUD testing in East US. +# Override via env var if you want to point the test at a different RG. +RESOURCE_GROUP = os.environ.get("FILESHARES_TEST_RG", "sdk-python-eastus-fileshares-crud-rg") +LOCATION = os.environ.get("FILESHARES_TEST_LOCATION", "eastus") + + +def _build_payload(location: str) -> fs_models.FileShare: + """Build a minimal-but-valid FileShare payload (mirrors the sample GET response).""" + # Values mirror the Az.FileShare PowerShell CRUD test + # (src/FileShare/FileShare.Autorest/test/FileShare-CRUD.Tests.ps1). 
+ return fs_models.FileShare( + location=location, + tags={"lifecycle": "crud", "test": "nfs", "owner": "azsdk-crud-test"}, + properties=fs_models.FileShareProperties( + mount_name="theshare", + media_tier="SSD", + redundancy="Local", + protocol="NFS", + provisioned_storage_gi_b=1024, + provisioned_io_per_sec=4024, + provisioned_throughput_mi_b_per_sec=228, + nfs_protocol_properties=fs_models.NfsProtocolProperties(root_squash="NoRootSquash"), + ), + ) + + +class TestFileSharesCrud(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client( + FileSharesMgmtClient, + base_url=ARM_ENDPOINT, + credential_scopes=["https://management.azure.com/.default"], + ) + + # No proxy recording is committed for this test yet, so it only runs when + # AZURE_TEST_RUN_LIVE=true. Once a recording is captured and pushed to the + # assets repo, this marker can be removed. + @pytest.mark.live_test_only + @recorded_by_proxy + def test_file_share_crud(self): + # Use a short, unique name per run so concurrent test runs don't collide. + # Service requires lowercase letters/digits/hyphens; keep it under 63 chars. 
+ share_name = f"fs-azsdk-{uuid.uuid4().hex[:10]}" + + # ---------- CREATE ---------- + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + resource=_build_payload(LOCATION), + ).result() + + assert created is not None + assert created.name == share_name + assert created.location.lower() == LOCATION.lower() + assert created.properties is not None + assert created.properties.provisioning_state == "Succeeded" + assert created.properties.protocol == "NFS" + assert created.properties.media_tier == "SSD" + assert created.properties.provisioned_storage_gi_b == 1024 + + try: + # ---------- GET ---------- + got = self.client.file_shares.get( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + ) + assert got.name == share_name + assert got.tags.get("lifecycle") == "crud" + assert got.properties.host_name # populated by service + + # ---------- UPDATE (tag only) ---------- + updated = self.client.file_shares.begin_update( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + properties=fs_models.FileShareUpdate( + tags={"lifecycle": "crud", "test": "nfs", "updated": "true", "version": "2"}, + ), + ).result() + assert updated.tags.get("updated") == "true" + assert updated.tags.get("version") == "2" + + # ---------- LIST ---------- + listed = list(self.client.file_shares.list_by_parent(resource_group_name=RESOURCE_GROUP)) + assert any(s.name == share_name for s in listed) + + finally: + # ---------- DELETE ---------- + self.client.file_shares.begin_delete( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + ).result() + + # Verify gone (404) + from azure.core.exceptions import ResourceNotFoundError + + with pytest.raises(ResourceNotFoundError): + self.client.file_shares.get( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + ) diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_edge_cases.py 
b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_edge_cases.py new file mode 100644 index 000000000000..488d5fb8ee8b --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_edge_cases.py @@ -0,0 +1,137 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Edge-case tests for ``Microsoft.FileShares``. + +Scenarios mirrored from +``azure-powershell/src/FileShare/FileShare.Autorest/test/FileShare-EdgeCases.Tests.ps1``: +name-length boundaries, name characters, tag count and value shapes, network-access +toggles. PowerShell-only checks (``-InputObject`` identity) are intentionally omitted. +""" + +import uuid + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + RESOURCE_GROUP, + build_share_payload, + build_share_update, + make_client, + safe_delete_share, + var_share, +) + + +class TestFileSharesEdgeCases(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_min_length_name(self, variables): + """Service-acceptable minimum name length (3 chars). 
Fixed name (no uuid).""" + name = "fsa" + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + assert created.name == name + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_max_length_name(self, variables): + """Maximum length name (63 chars), lowercase letters + digits.""" + default = ("fs" + uuid.uuid4().hex + uuid.uuid4().hex)[:63] + name = variables.setdefault("share_name", default) + assert len(name) == 63 + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + assert created.name == name + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_name_with_hyphens_and_digits(self, variables): + """Hyphens and digits are valid name characters.""" + name = var_share(variables, "share_name", "share-123-edge-456") + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + assert created.name == name + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_many_tags(self, variables): + """A reasonably large tag set round-trips.""" + many = {f"tag{i}": f"value{i}" for i in range(15)} + name = var_share(variables, "share_name", "manytags") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(tags=many), + ).result() + updated = self.client.file_shares.begin_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + properties=build_share_update(tags=many), + ).result() + assert len(updated.tags) >= len(many) + for key, value 
in many.items(): + assert updated.tags.get(key) == value + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_tags_with_special_characters(self, variables): + """Tag values with email/path/version-style strings round-trip unchanged.""" + special = { + "email": "test@example.com", + "path": "/var/log/app", + "version": "1.0.0-beta+build.123", + } + name = var_share(variables, "share_name", "specialtags") + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(tags=special), + ).result() + assert created.tags.get("email") == "test@example.com" + assert created.tags.get("path") == "/var/log/app" + assert created.tags.get("version") == "1.0.0-beta+build.123" + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_public_network_access_disabled_at_create(self, variables): + """Create a share with ``PublicNetworkAccess=Disabled``.""" + name = var_share(variables, "share_name", "pnadisabled") + try: + created = self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(public_network_access="Disabled"), + ).result() + assert created.properties.public_network_access == "Disabled" + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_informational.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_informational.py new file mode 100644 index 000000000000..a70f28fc4bef --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_informational.py @@ -0,0 +1,94 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Informational-operations tests for ``Microsoft.FileShares``.""" + +from azure.mgmt.fileshares import models as fs_models + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + LOCATION, + RESOURCE_GROUP, + build_share_payload, + make_client, + safe_delete_share, + var_share, +) + + +class TestFileSharesInformational(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_get_limits(self): + response = self.client.informational_operations.get_limits(location=LOCATION) + assert response is not None + assert response.properties is not None + + @recorded_by_proxy + def test_get_usage_data(self): + response = self.client.informational_operations.get_usage_data(location=LOCATION) + assert response is not None + assert response.properties is not None + + @recorded_by_proxy + def test_get_provisioning_recommendation(self): + request = fs_models.FileShareProvisioningRecommendationRequest( + properties=fs_models.FileShareProvisioningRecommendationInput( + provisioned_storage_gi_b=1000, + ), + ) + response = self.client.informational_operations.get_provisioning_recommendation( + location=LOCATION, + body=request, + ) + assert response is not None + assert response.properties is not None + assert response.properties.provisioned_io_per_sec > 0 + assert response.properties.provisioned_throughput_mi_b_per_sec > 0 + assert len(response.properties.available_redundancy_options) > 0 + + @recorded_by_proxy + def test_check_name_availability_for_unique_name(self, variables): + name = var_share(variables, "share_name", "unique-never-created") + request = fs_models.CheckNameAvailabilityRequest( + name=name, + type="Microsoft.FileShares/fileShares", 
+ ) + response = self.client.file_shares.check_name_availability( + location=LOCATION, + body=request, + ) + assert response is not None + assert response.name_available is True + return variables + + @recorded_by_proxy + def test_check_name_availability_for_taken_name(self, variables): + """Create a share, then assert its name is reported as not available.""" + name = var_share(variables, "share_name", "taken") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + + request = fs_models.CheckNameAvailabilityRequest( + name=name, + type="Microsoft.FileShares/fileShares", + ) + response = self.client.file_shares.check_name_availability( + location=LOCATION, + body=request, + ) + assert response is not None + assert response.name_available is False + assert response.reason is not None + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_listing.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_listing.py new file mode 100644 index 000000000000..27597c13e90d --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_listing.py @@ -0,0 +1,54 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. 
+# -------------------------------------------------------------------------- +"""Listing tests for ``Microsoft.FileShares`` covering both the resource-group and +subscription-scoped pagers.""" + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + RESOURCE_GROUP, + build_share_payload, + make_client, + safe_delete_share, + var_share, +) + + +class TestFileSharesListing(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_list_by_subscription_includes_created_share(self, variables): + name = var_share(variables, "share_name", "listsub") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + + listed_names = {s.name for s in self.client.file_shares.list_by_subscription()} + assert name in listed_names + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_list_by_parent_returns_pager(self, variables): + name = var_share(variables, "share_name", "listrg") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + + shares = list(self.client.file_shares.list_by_parent(resource_group_name=RESOURCE_GROUP)) + assert any(s.name == name for s in shares) + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_negative.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_negative.py new file mode 100644 index 000000000000..41166566b8fa --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_negative.py @@ -0,0 +1,134 @@ +# coding=utf-8 +# 
-------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Negative / error-path tests for ``Microsoft.FileShares``.""" + +import pytest +from azure.core.exceptions import HttpResponseError, ResourceNotFoundError + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + RESOURCE_GROUP, + build_share_payload, + build_share_update, + build_snapshot_payload, + make_client, + var_share, +) + + +class TestFileSharesNegative(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_get_nonexistent_share_raises_404(self, variables): + name = var_share(variables, "share_name", "missing") + with pytest.raises(ResourceNotFoundError): + self.client.file_shares.get( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + ) + return variables + + @recorded_by_proxy + def test_get_nonexistent_resource_group_raises(self, variables): + name = var_share(variables, "share_name", "missing") + with pytest.raises((ResourceNotFoundError, HttpResponseError)): + self.client.file_shares.get( + resource_group_name="rg-does-not-exist-azsdk-test", + resource_name=name, + ) + return variables + + @recorded_by_proxy + def test_update_nonexistent_share_raises(self, variables): + name = var_share(variables, "share_name", "missing") + with pytest.raises((ResourceNotFoundError, HttpResponseError)): + self.client.file_shares.begin_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + properties=build_share_update(tags={"should": "fail"}), + ).result() + return variables + + @recorded_by_proxy + def test_create_invalid_location_raises(self, variables): + name = 
var_share(variables, "share_name", "badloc") + with pytest.raises(HttpResponseError): + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(location="invalidlocation123"), + ).result() + return variables + + @recorded_by_proxy + def test_create_invalid_media_tier_raises(self, variables): + name = var_share(variables, "share_name", "badtier") + with pytest.raises(HttpResponseError): + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(media_tier="InvalidTier"), + ).result() + return variables + + @recorded_by_proxy + def test_create_invalid_protocol_raises(self, variables): + name = var_share(variables, "share_name", "badproto") + with pytest.raises(HttpResponseError): + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(protocol="InvalidProtocol"), + ).result() + return variables + + @recorded_by_proxy + def test_create_invalid_redundancy_raises(self, variables): + name = var_share(variables, "share_name", "badredund") + with pytest.raises(HttpResponseError): + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(redundancy="InvalidRedundancy"), + ).result() + return variables + + @recorded_by_proxy + def test_create_zero_storage_raises(self, variables): + name = var_share(variables, "share_name", "zerostorage") + with pytest.raises(HttpResponseError): + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(provisioned_storage_gi_b=0), + ).result() + return variables + + @recorded_by_proxy + def test_create_negative_storage_raises(self, variables): + name = var_share(variables, "share_name", "negstorage") + with 
pytest.raises(HttpResponseError): + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(provisioned_storage_gi_b=-100), + ).result() + return variables + + @recorded_by_proxy + def test_snapshot_create_for_nonexistent_share_raises(self, variables): + name = var_share(variables, "share_name", "missingparent") + with pytest.raises((ResourceNotFoundError, HttpResponseError)): + self.client.file_share_snapshots.begin_create_or_update_file_share_snapshot( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + name="orphan-snapshot", + resource=build_snapshot_payload(), + ).result() + return variables diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_operations.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_operations.py new file mode 100644 index 000000000000..cf04eabfe7f8 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_operations.py @@ -0,0 +1,40 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +import os + +import pytest +from azure.mgmt.fileshares import FileSharesMgmtClient + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +# Per-package overrides. Values are loaded from +# `sdk/fileshares/azure-mgmt-fileshares/.env` by tests/conftest.py (load_dotenv). 
+ARM_ENDPOINT = os.environ.get("ARM_ENDPOINT", "https://eastus2euap.management.azure.com") + + +class TestFileSharesOperations(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = self.create_mgmt_client( + FileSharesMgmtClient, + base_url=ARM_ENDPOINT, + # Canary ARM endpoints accept tokens issued for the global ARM resource. + credential_scopes=["https://management.azure.com/.default"], + ) + + # No proxy recording is committed for this test yet, so it only runs when + # AZURE_TEST_RUN_LIVE=true. Once a recording is captured and pushed to the + # assets repo, this marker can be removed. + @pytest.mark.live_test_only + @recorded_by_proxy + def test_operations_list(self): + # `operations.list()` is a tenant-scope ARM call that needs no resource group + # or pre-existing resources, making it ideal for a smoke test. + response = self.client.operations.list() + result = [r for r in response] + assert len(result) > 0 + # Every ARM operation should expose a name like "Microsoft.FileShares/..." + for op in result: + assert op.name and op.name.startswith("Microsoft.FileShares/") diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_private_endpoint.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_private_endpoint.py new file mode 100644 index 000000000000..50cb1195d21c --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_private_endpoint.py @@ -0,0 +1,72 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Private-endpoint surface tests for ``Microsoft.FileShares``. + +Inspired by the read-only portions of +``azure-powershell/src/FileShare/FileShare.Autorest/test/FileShare-PrivateEndpoint.Tests.ps1``. 
+VNet/Subnet/PE provisioning belongs to ``azure-mgmt-network`` and is intentionally +excluded — these tests cover only the FileShares-side PE/private-link surface. +""" + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + RESOURCE_GROUP, + build_share_payload, + make_client, + safe_delete_share, + var_share, +) + + +class TestFileSharesPrivateEndpoint(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_list_private_link_resources(self, variables): + name = var_share(variables, "share_name", "plr") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + + resources = list( + self.client.private_link_resources.list( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + ) + ) + assert len(resources) > 0 + for plr in resources: + assert plr.name + assert plr.properties is not None + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables + + @recorded_by_proxy + def test_list_private_endpoint_connections_empty(self, variables): + name = var_share(variables, "share_name", "pec-empty") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + resource=build_share_payload(), + ).result() + + connections = list( + self.client.private_endpoint_connections.list_by_file_share( + resource_group_name=RESOURCE_GROUP, + resource_name=name, + ) + ) + assert connections == [] + finally: + safe_delete_share(self.client, RESOURCE_GROUP, name) + return variables diff --git a/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_snapshots.py b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_snapshots.py new file mode 100644 index 000000000000..b5d2c85c1b07 --- /dev/null +++ 
b/sdk/fileshares/azure-mgmt-fileshares/tests/test_fileshares_snapshots.py @@ -0,0 +1,115 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# -------------------------------------------------------------------------- +"""Snapshot tests for ``Microsoft.FileShares``.""" + +import pytest +from azure.core.exceptions import ResourceNotFoundError + +from devtools_testutils import AzureMgmtRecordedTestCase, recorded_by_proxy + +from _fs_test_helpers import ( # type: ignore[import-not-found] + RESOURCE_GROUP, + build_share_payload, + build_snapshot_payload, + make_client, + safe_delete_share, + safe_delete_snapshot, + var_share, + var_snapshot, +) + + +class TestFileSharesSnapshots(AzureMgmtRecordedTestCase): + def setup_method(self, method): + self.client = make_client(self) + + @recorded_by_proxy + def test_snapshot_crud(self, variables): + share_name = var_share(variables, "share_name", "snap-parent") + snapshot_name = var_snapshot(variables, "snapshot_name", "snap") + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + resource=build_share_payload(), + ).result() + + try: + created = self.client.file_share_snapshots.begin_create_or_update_file_share_snapshot( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + name=snapshot_name, + resource=build_snapshot_payload(metadata={"purpose": "testing"}), + ).result() + assert created.name == snapshot_name + assert created.properties is not None + assert created.properties.metadata.get("purpose") == "testing" + + got = self.client.file_share_snapshots.get_file_share_snapshot( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + name=snapshot_name, + ) + assert got.name == snapshot_name + + listed = list( + 
self.client.file_share_snapshots.list_by_file_share( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + ) + ) + assert any(s.name == snapshot_name for s in listed) + + self.client.file_share_snapshots.begin_delete_file_share_snapshot( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + name=snapshot_name, + ).result() + with pytest.raises(ResourceNotFoundError): + self.client.file_share_snapshots.get_file_share_snapshot( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + name=snapshot_name, + ) + finally: + safe_delete_snapshot(self.client, RESOURCE_GROUP, share_name, snapshot_name) + finally: + safe_delete_share(self.client, RESOURCE_GROUP, share_name) + return variables + + @recorded_by_proxy + def test_multiple_snapshots_listing(self, variables): + share_name = var_share(variables, "share_name", "multisnap-parent") + snapshot_names = [var_snapshot(variables, f"snapshot_name_{i}", f"snap-{i}") for i in range(3)] + try: + self.client.file_shares.begin_create_or_update( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + resource=build_share_payload(), + ).result() + + for i, snap in enumerate(snapshot_names): + self.client.file_share_snapshots.begin_create_or_update_file_share_snapshot( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + name=snap, + resource=build_snapshot_payload(metadata={"generation": str(i)}), + ).result() + + listed = { + s.name + for s in self.client.file_share_snapshots.list_by_file_share( + resource_group_name=RESOURCE_GROUP, + resource_name=share_name, + ) + } + for snap in snapshot_names: + assert snap in listed + finally: + for snap in snapshot_names: + safe_delete_snapshot(self.client, RESOURCE_GROUP, share_name, snap) + safe_delete_share(self.client, RESOURCE_GROUP, share_name) + return variables diff --git a/sdk/fileshares/azure-mgmt-fileshares/tsp-location.yaml b/sdk/fileshares/azure-mgmt-fileshares/tsp-location.yaml new file mode 100644 index 
000000000000..18efe3440335 --- /dev/null +++ b/sdk/fileshares/azure-mgmt-fileshares/tsp-location.yaml @@ -0,0 +1,4 @@ +directory: specification/fileshares/resource-manager/Microsoft.FileShares/FileShares +commit: 75ca22c5387184fb1f0c3b115ed22a006813800d +repo: Azure/azure-rest-api-specs +additionalDirectories: diff --git a/sdk/fileshares/ci.yml b/sdk/fileshares/ci.yml new file mode 100644 index 000000000000..10b44280637b --- /dev/null +++ b/sdk/fileshares/ci.yml @@ -0,0 +1,34 @@ +# DO NOT EDIT THIS FILE +# This file is generated automatically and any changes will be lost. + +trigger: + branches: + include: + - main + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/fileshares/ + +pr: + branches: + include: + - main + - feature/* + - hotfix/* + - release/* + - restapi* + paths: + include: + - sdk/fileshares/ + +extends: + template: ../../eng/pipelines/templates/stages/archetype-sdk-client.yml + parameters: + ServiceDirectory: fileshares + TestProxy: true + Artifacts: + - name: azure-mgmt-fileshares + safeName: azuremgmtfileshares From 7488c67671b2acf340cd471f06959dd18e744d4f Mon Sep 17 00:00:00 2001 From: Xinran Date: Thu, 7 May 2026 03:58:33 +0000 Subject: [PATCH 07/10] Enhance test configurations for OpenTelemetry and update tracing assertions - Disable OpenTelemetry metrics exporter during tests to prevent noisy tracebacks. - Ensure clean shutdown of OpenTelemetry providers in pytest. - Update APPLICATIONINSIGHTS_CONNECTION_STRING in tests to a placeholder value. - Modify logging levels in tests to capture warnings instead of errors. - Adjust assertions in tracing tests to reflect expected behavior when tracing is not configured. 
--- .../tests/conftest.py | 33 ++++++++++++++++ .../tests/test_ws_graceful_shutdown.py | 2 +- .../tests/test_ws_span_parenting.py | 8 ++-- .../tests/test_ws_tracing.py | 38 +++++++++++-------- 4 files changed, 60 insertions(+), 21 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py index 4576297cda2a..2afa44b59165 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/conftest.py @@ -4,6 +4,17 @@ """Shared fixtures and factory functions for invocations tests.""" import json import os + +# Disable the OpenTelemetry metrics console exporter during tests. The +# microsoft-opentelemetry distro otherwise spawns a PeriodicExportingMetricReader +# background thread that writes to stdout every minute; once pytest closes its +# captured stdout at session end, the thread emits a noisy +# "ValueError: I/O operation on closed file." traceback. Setting +# OTEL_METRICS_EXPORTER=none before any agentserver code is imported prevents +# the exporter from being installed in the first place. +os.environ.setdefault("OTEL_METRICS_EXPORTER", "none") +os.environ.setdefault("OTEL_LOGS_EXPORTER", "none") + from typing import Any import pytest @@ -34,6 +45,28 @@ def pytest_configure(config): config.addinivalue_line("markers", "tracing_e2e: end-to-end tracing tests against live Application Insights") +def pytest_unconfigure(config): # pylint: disable=unused-argument + """Shut down OpenTelemetry providers cleanly before pytest tears down stdout. + + Without this, background threads in the meter / logger / tracer providers + can attempt to flush to a closed stdout stream and emit + ``ValueError: I/O operation on closed file.`` tracebacks. 
+ """ + for mod_name, getter in ( + ("opentelemetry.metrics", "get_meter_provider"), + ("opentelemetry._logs", "get_logger_provider"), + ("opentelemetry.trace", "get_tracer_provider"), + ): + try: + mod = __import__(mod_name, fromlist=[getter]) + provider = getattr(mod, getter)() + shutdown = getattr(provider, "shutdown", None) + if callable(shutdown): + shutdown() + except Exception: # pylint: disable=broad-exception-caught + pass + + # --------------------------------------------------------------------------- # E2E tracing fixtures # --------------------------------------------------------------------------- diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py index c3f8412c0a57..a22e5d5eacea 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_graceful_shutdown.py @@ -136,7 +136,7 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: async def on_shutdown(): raise RuntimeError("shutdown exploded") - with caplog.at_level(logging.ERROR, logger="azure.ai.agentserver"): + with caplog.at_level(logging.WARNING, logger="azure.ai.agentserver"): await _drive_lifespan(app) assert any("on_shutdown" in r.message.lower() or "error" in r.message.lower() for r in caplog.records) diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py index dbf2a774340a..a5c272802df7 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_span_parenting.py @@ -54,8 +54,8 @@ def _get_spans(): def _make_server_with_child_span(): """Server whose handler creates a child span (simulating a framework).""" - with 
patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): app = InvocationWSAgentServerHost() child_tracer = trace.get_tracer("test.framework") @@ -69,8 +69,8 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: def _make_streaming_server_with_child_span(): """Server with streaming response whose handler creates a child span.""" - with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): app = InvocationWSAgentServerHost() child_tracer = trace.get_tracer("test.framework") diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py index 99c1b42742d7..92b705023099 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/tests/test_ws_tracing.py @@ -66,8 +66,8 @@ def _get_spans(): def _make_tracing_server(**kwargs): """Create an InvocationWSAgentServerHost with tracing enabled.""" - with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": 
"InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): server = InvocationWSAgentServerHost(**kwargs) @server.ws_invoke_handler @@ -79,8 +79,8 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: def _make_tracing_server_with_get_cancel(**kwargs): """Create a tracing-enabled server with get/cancel handlers.""" - with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): server = InvocationWSAgentServerHost(**kwargs) store: dict[str, dict] = {} @@ -108,8 +108,8 @@ async def cancel_handler(context: InvocationWSContext) -> dict: def _make_failing_tracing_server(**kwargs): """Create a tracing-enabled server whose handler raises.""" - with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): server = InvocationWSAgentServerHost(**kwargs) @server.ws_invoke_handler @@ -121,8 +121,8 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: def _make_streaming_tracing_server(**kwargs): """Create a tracing-enabled server with streaming response.""" - with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, 
{"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): server = InvocationWSAgentServerHost(**kwargs) @server.ws_invoke_handler @@ -138,7 +138,7 @@ async def handle(payload: dict, context: InvocationWSContext): # --------------------------------------------------------------------------- def test_ws_tracing_disabled_by_default(): - """No spans are created when tracing is not enabled.""" + """Invoke spans are still created by the global tracer when tracing is not explicitly configured.""" if _MODULE_EXPORTER: _MODULE_EXPORTER.clear() @@ -153,9 +153,12 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() + # With the function-based tracing design, spans are always created + # when OTel is installed (via the global tracer). The difference is + # whether exporters are configured. Verify a span IS created. 
spans = _get_spans() invoke_spans = [s for s in spans if "invoke_agent" in s.name] - assert len(invoke_spans) == 0 + assert len(invoke_spans) >= 1 # --------------------------------------------------------------------------- @@ -240,8 +243,8 @@ def test_ws_cancel_invocation_creates_span(): def test_ws_tracing_via_appinsights_env_var(): """Tracing is enabled when APPLICATIONINSIGHTS_CONNECTION_STRING is set.""" - with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=test"}): - with patch("azure.ai.agentserver.core._tracing.TracingHelper._setup_azure_monitor"): + with patch.dict(os.environ, {"APPLICATIONINSIGHTS_CONNECTION_STRING": "InstrumentationKey=00000000-0000-0000-0000-000000000000"}): + with patch("azure.ai.agentserver.core._tracing._setup_distro_export", create=True): app = InvocationWSAgentServerHost() @app.ws_invoke_handler @@ -263,7 +266,8 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: # --------------------------------------------------------------------------- def test_ws_no_tracing_when_no_endpoints(): - """Tracing is disabled when no connection string or OTLP endpoint is set.""" + """When no connection string or OTLP endpoint is set, configure_observability + still runs (for console logging) but tracing spans are not exported.""" env = os.environ.copy() env.pop("APPLICATIONINSIGHTS_CONNECTION_STRING", None) env.pop("OTEL_EXPORTER_OTLP_ENDPOINT", None) @@ -282,9 +286,11 @@ async def handle(payload: dict, context: InvocationWSContext) -> dict: ws.send_json({"action": "invoke", "payload": {}}) ws.receive_json() + # Spans are still created via the global tracer — the difference + # is no exporters are configured to send them anywhere. 
spans = _get_spans() invoke_spans = [s for s in spans if "invoke_agent" in s.name] - assert len(invoke_spans) == 0 + assert len(invoke_spans) >= 1 # --------------------------------------------------------------------------- @@ -335,7 +341,7 @@ def test_ws_genai_attributes_on_invoke_span(): # --------------------------------------------------------------------------- def test_ws_session_id_in_invocation_id(): - """Session ID is set as gen_ai.conversation.id on invoke span.""" + """Session ID is set as microsoft.session.id on invoke span.""" server = _make_tracing_server() client = TestClient(server) with client.websocket_connect("/invocations_ws/ws") as ws: @@ -350,4 +356,4 @@ def test_ws_session_id_in_invocation_id(): invoke_spans = [s for s in spans if "invoke_agent" in s.name] assert len(invoke_spans) >= 1 attrs = dict(invoke_spans[0].attributes) - assert attrs.get("gen_ai.conversation.id") == "test-session" + assert attrs.get("microsoft.session.id") == "test-session" From 5fb181447b46ee9310011d3546ae02e990c7f857 Mon Sep 17 00:00:00 2001 From: Xinran Date: Thu, 7 May 2026 06:08:19 +0000 Subject: [PATCH 08/10] fix format --- sdk/agentserver/azure-ai-agentserver-invocations/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/README.md b/sdk/agentserver/azure-ai-agentserver-invocations/README.md index b76cbe2c4277..4b54bfef0162 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/README.md +++ b/sdk/agentserver/azure-ai-agentserver-invocations/README.md @@ -224,7 +224,7 @@ All operations use a single persistent WebSocket connection: All messages are JSON text frames with an `action` field: -```json +```text {"action": "invoke", "payload": {...}, "invocation_id": "optional", "session_id": "optional"} {"action": "get_invocation", "invocation_id": "required"} {"action": "cancel_invocation", "invocation_id": "required"} @@ -234,7 +234,7 @@ All messages are JSON text frames with an 
`action` field: ### Server → Client messages -```json +```text {"type": "result", "invocation_id": "...", "session_id": "...", "payload": {...}} {"type": "stream_chunk", "invocation_id": "...", "session_id": "...", "payload": {...}} {"type": "stream_end", "invocation_id": "...", "session_id": "..."} From 2d268d2458f1bcf94cb415c41614a748cb648605 Mon Sep 17 00:00:00 2001 From: Xinran Date: Thu, 7 May 2026 07:07:34 +0000 Subject: [PATCH 09/10] fix format --- .vscode/cspell.json | 4 ++++ .../azure/ai/agentserver/invocations/_invocation_ws.py | 4 ++-- .../samples/streaming_ws_invoke_agent/README.md | 4 ---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.vscode/cspell.json b/.vscode/cspell.json index bd2e283626d5..b54ec27a238c 100644 --- a/.vscode/cspell.json +++ b/.vscode/cspell.json @@ -2211,6 +2211,10 @@ { "filename": "sdk/agentserver/azure-ai-agentserver-githubcopilot/**", "words": ["RAPI", "BYOK", "byok", "NCUS", "ncusacr", "fstring", "ename", "valeriepham", "coreai", "Vnext", "PYTHONIOENCODING"] + }, + { + "filename": "sdk/agentserver/azure-ai-agentserver-invocations/**", + "words": ["Segoe", "Roboto", "unconfigure"] } ], "allowCompoundWords": true diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py index 5fd68f4c3c73..88c5ba9c0109 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py @@ -14,9 +14,9 @@ import os import re import uuid -from collections.abc import AsyncGenerator, Awaitable, Callable # pylint: disable=import-error +from collections.abc import Callable # pylint: disable=import-error from dataclasses import dataclass -from typing import Any, Optional, Union +from typing import Any, Optional from starlette.requests 
import Request from starlette.responses import JSONResponse, Response diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md index 52a409d37f00..9a37643d7f64 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md +++ b/sdk/agentserver/azure-ai-agentserver-invocations/samples/streaming_ws_invoke_agent/README.md @@ -89,7 +89,3 @@ To use a different port: ```bash python client.py --port 3000 ``` - -## Deploying to Microsoft Foundry - -To deploy your agent to Microsoft Foundry, follow the deployment guide at https://github.com/microsoft/hosted-agents-vnext-private-preview/blob/main/azd-quickstart.md From 699fc23c9dcf7be83fe0ec42149d2c49fd4c6dcf Mon Sep 17 00:00:00 2001 From: Xinran Date: Thu, 7 May 2026 07:45:31 +0000 Subject: [PATCH 10/10] fix format --- .../azure/ai/agentserver/invocations/_invocation_ws.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py index 88c5ba9c0109..55274af6963f 100644 --- a/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py +++ b/sdk/agentserver/azure-ai-agentserver-invocations/azure/ai/agentserver/invocations/_invocation_ws.py @@ -6,7 +6,7 @@ Provides the invocation protocol over WebSocket long connections as a :class:`~azure.ai.agentserver.core.AgentServerHost` subclass. """ -import asyncio +import asyncio # pylint: disable=do-not-import-asyncio import contextlib import inspect import json