diff --git a/.claude/skills/buildit/SKILL.md b/.claude/skills/buildit/SKILL.md new file mode 100644 index 0000000..4ee157a --- /dev/null +++ b/.claude/skills/buildit/SKILL.md @@ -0,0 +1,22 @@ +--- +name: buildit +description: Implement the next task incrementally — build, test, verify, commit +--- + +Invoke the agent-skills:incremental-implementation skill alongside agent-skills:test-driven-development. + +If uncertain, ask the user where the plan to follow is. + +Pick the next pending task from the plan. +For each task: + +1. Read the task's acceptance criteria +2. Load relevant context (existing code, patterns, types) +3. Write a failing test for the expected behavior (RED) +4. Implement the minimum code to pass the test (GREEN) +5. Run the full test suite to check for regressions +6. Run the build to verify compilation +7. Commit with a descriptive message +8. Mark the task complete and move to the next one + +If any step fails, follow the agent-skills:debugging-and-error-recovery skill. diff --git a/.memsearch/memory/2026-04-26.md b/.memsearch/memory/2026-04-26.md new file mode 100644 index 0000000..719b654 --- /dev/null +++ b/.memsearch/memory/2026-04-26.md @@ -0,0 +1,61 @@ + +## Session 07:30 + +## Session 07:34 + +### 07:34 + + +- User asked Claude Code to address the issues outlined in the Phase 3 code review (phase-3-review.md). +- Claude Code explored the foreman-client package files, drafted an implementation plan, + and received user approval via ExitPlanMode. +- Claude Code edited `client.py` to add `close()`, `__enter__`, and `__exit__` methods to `ForemanClient`, + and updated the docstring example to demonstrate context manager usage. +- Claude Code edited `__init__.py` to import and export `LLMBackendRef` and `TaskContext` in the `__all__` list. 
+- Claude Code rewrote `test_client.py`: moved `import json` to module level, + removed the misleading comment about call ordering, + and added a new `TestForemanClientLifecycle` class with 3 tests for lifecycle behavior. +- Claude Code ran pytest in the foreman-client directory; all 19 tests passed (16 existing + 3 new). +- Claude Code ran pre-commit across the project; all linting and formatting checks passed. + +## Session 07:37 + +### 07:37 + + +- User asked Claude Code to update progress in the Phase 3 plan document and mark the human review checkpoint + as complete. +- Claude Code made 4 Edit tool calls to `docs/specs/02-messaging-update/plan.md`, + using `replace_all=False` to target specific sections. +- Task 8 (Implement `ForemanClient`) acceptance criteria were marked complete + (6 checkboxes changed from `[ ]` to `[x]`). +- Task 9 (Tests for `foremanclient`) acceptance criteria were marked complete + (6 checkboxes changed from `[ ]` to `[x]`). +- Phase 3 checkpoint was marked complete, including the human review step (4 checkboxes changed from `[ ]` to `[x]`). +- All edits succeeded; Claude Code reported Phase 3 fully checked off and ready for Phase 4 implementation. 
+ +## Session 07:44 + +## Session 07:48 + +### 07:48 + + +- User asked Claude Code to commit the current modified and staged files +- Claude Code ran `git status` and `git diff` to examine the state; found 8 staged files including spec docs, + foreman-client package code, and tests +- Claude Code read `.gitignore` and determined that `.claude/`, `.memsearch/`, + and `.superset/` directories were not listed and should not be committed +- Claude Code unstaged the three tooling artifact directories/files using `git restore --staged` +- Claude Code ran `git log --oneline -5` to review project commit message style from recent commits +- Claude Code attempted `git commit` but the pre-commit hook (interrogate) failed + and auto-fixed line breaks in `docs/specs/02-messaging-update/phase-3-review.md` +- Claude Code re-staged the corrected `phase-3-review.md` file and committed successfully + as commit `38c72c0` with message "Address Phase 3 code review: fix resource leak, export types, clean up tests" +- Claude Code offered to add the unstaged tooling artifact directories to `.gitignore` + +### 07:48 + + +- User responded negatively with "no" to a preceding question or proposal from Claude Code. +- Claude Code acknowledged the response with "Got it." diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c0d6954..6d91d0b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.15.11' + rev: 'v0.15.12' hooks: - id: ruff-format - id: ruff-check @@ -37,11 +37,12 @@ repos: - id: detect-secrets additional_dependencies: ["gibberish-detector"] - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.20.1 + rev: v1.20.2 hooks: - id: mypy args: [--no-strict-optional, --ignore-missing-imports] additional_dependencies: ["toml", "types-PyYAML"] + exclude: ^foreman-client/ - repo: https://github.com/jsh9/pydoclint rev: 0.8.3 hooks: @@ -55,7 +56,7 @@ repos: - id: interrogate exclude: test.* - repo: https://github.com/rvben/rumdl-pre-commit - rev: v0.1.78 # Use latest version + rev: v0.1.83 # Use latest version hooks: - id: rumdl # Lint + auto-fix, fails if unfixable issues remain - id: rumdl-fmt # Pure format, always exits 0 diff --git a/.superset/config.json b/.superset/config.json new file mode 100644 index 0000000..29db9c9 --- /dev/null +++ b/.superset/config.json @@ -0,0 +1,7 @@ +{ + "setup": [ + "uv sync" + ], + "teardown": [], + "run": [] +} diff --git a/agents/issue-triage/issue_triage/agent.py b/agents/issue-triage/issue_triage/agent.py index 52ea05a..c242fc1 100644 --- a/agents/issue-triage/issue_triage/agent.py +++ b/agents/issue-triage/issue_triage/agent.py @@ -2,86 +2,120 @@ from __future__ import annotations -import uuid -from typing import Any, Optional +import asyncio +import os +import threading +from contextlib import asynccontextmanager +from typing import TYPE_CHECKING, AsyncIterator -from fastapi import FastAPI -from pydantic import BaseModel, Field +import structlog +from fastapi import BackgroundTasks, FastAPI +from foremanclient import ForemanClient +from pydantic import BaseModel -app = FastAPI(title="foreman-issue-triage", version="0.1.0") +if TYPE_CHECKING: + from foremanclient.models import DecisionMessage, TaskMessage +logger = structlog.get_logger(__name__) -# --------------------------------------------------------------------------- -# Protocol models (self-contained; mirrors foreman.protocol) -# 
--------------------------------------------------------------------------- +_HEARTBEAT_INTERVAL: float = 25.0 -class LLMBackendRef(BaseModel): - """Reference to the LLM backend the agent should use.""" +def _get_client(application: FastAPI) -> ForemanClient: + """Return the ForemanClient for *application*, creating it from env vars if needed. - provider: str - model: str + Args: + application: The FastAPI application whose state holds the client. + Returns: + The :class:`~foremanclient.ForemanClient` instance for this agent. + """ + if not hasattr(application.state, "client"): + application.state.client = ForemanClient( + harness_url=os.environ["FOREMAN_HARNESS_URL"], + agent_url=os.environ["AGENT_URL"], + ) + return application.state.client -class TaskContext(BaseModel): - """Context injected by the harness into each task.""" - llm_backend: LLMBackendRef - memory_summary: Optional[str] = None +def triage(task: TaskMessage) -> DecisionMessage: + """Run triage logic on *task* and return a decision. + Args: + task: The incoming triage task from the harness. -class TaskMessage(BaseModel): - """Task message received from the harness.""" + Returns: + A :class:`~foremanclient.models.DecisionMessage` with decision, rationale, and actions. + """ + from prompts.triage import run_triage - task_id: str = Field(default_factory=lambda: str(uuid.uuid4())) - type: str - repo: str - payload: dict[str, Any] - context: TaskContext + return run_triage(task) -class ActionItem(BaseModel): - """A single action the harness should execute.""" +async def _process_task(client: ForemanClient, task: TaskMessage) -> None: + """Call triage on *task* and report the completed decision to the harness. - model_config = {"extra": "allow"} + A daemon heartbeat thread fires every :data:`_HEARTBEAT_INTERVAL` seconds + while triage is running so the harness does not re-queue the task mid-flight. - type: str + Args: + client: The :class:`~foremanclient.ForemanClient` to use for completing the task. 
+ task: The :class:`~foremanclient.models.TaskMessage` to process. + """ + stop_event = threading.Event() + def _heartbeat_loop() -> None: + while not stop_event.wait(timeout=_HEARTBEAT_INTERVAL): + client.heartbeat(task.task_id) -class DecisionMessage(BaseModel): - """Decision returned to the harness.""" + heartbeat_thread = threading.Thread(target=_heartbeat_loop, daemon=True) + heartbeat_thread.start() + try: + decision = await asyncio.to_thread(triage, task) + await asyncio.to_thread(client.complete_task, task.task_id, decision) + finally: + stop_event.set() - task_id: str - decision: str - rationale: str - actions: list[ActionItem] = [] +async def _poll_and_process(client: ForemanClient) -> None: + """Claim the next pending task from the harness and process it if one exists. -# --------------------------------------------------------------------------- -# Triage logic (implemented in Task 15; placeholder here) -# --------------------------------------------------------------------------- + Args: + client: The :class:`~foremanclient.ForemanClient` used to claim tasks. + """ + task = await asyncio.to_thread(client.next_task) + if task is not None: + await _process_task(client, task) -def triage(task: TaskMessage) -> DecisionMessage: - """Run triage logic on *task* and return a decision. +@asynccontextmanager +async def _lifespan(application: FastAPI) -> AsyncIterator[None]: + """FastAPI lifespan: drain all tasks queued while the agent was down. - This placeholder is replaced by the full implementation in - ``prompts/triage.py`` (Task 15). + Loops calling next_task() until the queue is empty so that accumulated + pending tasks are not left stuck after an unclean restart. Args: - task: The incoming triage task from the harness. - - Returns: - A :class:`DecisionMessage` with decision, rationale, and actions. + application: The FastAPI application instance. 
""" - from prompts.triage import run_triage + client = _get_client(application) + while True: + task = await asyncio.to_thread(client.next_task) + if task is None: + break + await _process_task(client, task) + yield + client.close() - return run_triage(task) +app = FastAPI(title="foreman-issue-triage", version="0.1.0", lifespan=_lifespan) -# --------------------------------------------------------------------------- -# Endpoints -# --------------------------------------------------------------------------- + +class TaskNudge(BaseModel): + """Nudge payload sent by the harness when a new task is enqueued.""" + + task_id: str + """Identifier of the newly enqueued task.""" @app.get("/health") @@ -94,14 +128,17 @@ async def health() -> dict[str, str]: return {"status": "ok"} -@app.post("/task", response_model=DecisionMessage) -async def handle_task(task: TaskMessage) -> DecisionMessage: - """Receive a triage task, run triage logic, and return a decision. +@app.post("/task", status_code=202) +async def handle_task(nudge: TaskNudge, background_tasks: BackgroundTasks) -> dict[str, str]: + """Accept a task nudge and process the task in the background. Args: - task: The incoming :class:`TaskMessage` from the harness. + nudge: The nudge payload containing the task_id from the harness. + background_tasks: FastAPI background task queue. Returns: - A :class:`DecisionMessage` with the triage decision and actions. + JSON body with ``{"status": "accepted"}``. 
""" - return triage(task) + client = _get_client(app) + background_tasks.add_task(_poll_and_process, client) + return {"status": "accepted"} diff --git a/agents/issue-triage/issue_triage/prompts/triage.py b/agents/issue-triage/issue_triage/prompts/triage.py index c6702e9..349209c 100644 --- a/agents/issue-triage/issue_triage/prompts/triage.py +++ b/agents/issue-triage/issue_triage/prompts/triage.py @@ -8,7 +8,7 @@ import litellm if TYPE_CHECKING: - from agent import DecisionMessage, TaskMessage + from foremanclient.models import DecisionMessage, TaskMessage _VALID_DECISIONS = {"label_and_respond", "close", "escalate", "skip"} @@ -78,7 +78,7 @@ def parse_llm_response( Returns: A validated :class:`~agent.DecisionMessage`. """ - from agent import ActionItem, DecisionMessage + from foremanclient.models import ActionItem, DecisionMessage def _skip(rationale: str = "Could not parse LLM response") -> DecisionMessage: return DecisionMessage(task_id=task_id, decision="skip", rationale=rationale, actions=[]) @@ -160,7 +160,7 @@ def run_triage(task: TaskMessage) -> DecisionMessage: A :class:`~agent.DecisionMessage` with decision, rationale, and actions. 
""" if _recent_comment_in_memory(task.context.memory_summary): - from agent import DecisionMessage + from foremanclient.models import DecisionMessage return DecisionMessage( task_id=task.task_id, diff --git a/agents/issue-triage/pyproject.toml b/agents/issue-triage/pyproject.toml index 828b14d..66f0c7d 100644 --- a/agents/issue-triage/pyproject.toml +++ b/agents/issue-triage/pyproject.toml @@ -18,4 +18,6 @@ runtime = [ "pydantic>=2.0", "litellm>=1.0.0", "httpx>=0.28.0", + "foreman-client>=0.1.0", + "structlog>=25.5.0", ] diff --git a/config.example.yaml b/config.example.yaml index 11e27a2..b166b6b 100644 --- a/config.example.yaml +++ b/config.example.yaml @@ -14,6 +14,13 @@ llm: polling: interval_seconds: 60 # how often to poll GitHub (seconds) +# queue: +# db_path: null # defaults to ~/.agent-harness/queue.db +# claim_timeout_seconds: 300 # seconds before a stale claimed task is re-enqueued +# max_retries: 3 # max re-enqueue attempts before marking a task failed +# drain_interval_seconds: 10 # how often (seconds) the harness drains completed tasks +# requeue_interval_seconds: 60 # how often (seconds) the harness checks for stale tasks + repos: - owner: callowayproject name: bump-my-version diff --git a/conftest.py b/conftest.py new file mode 100644 index 0000000..b5e4e8e --- /dev/null +++ b/conftest.py @@ -0,0 +1,34 @@ +"""Project-wide pytest configuration.""" + +from __future__ import annotations + +import pytest + + +def pytest_addoption(parser: pytest.Parser) -> None: + """Add --run-integration flag to the pytest CLI. + + Args: + parser: The pytest argument parser. + """ + parser.addoption( + "--run-integration", + action="store_true", + default=False, + help="Run integration tests that require real HTTP and SQLite resources.", + ) + + +def pytest_collection_modifyitems(config: pytest.Config, items: list[pytest.Item]) -> None: + """Skip integration-marked tests unless --run-integration is passed. + + Args: + config: The pytest configuration object. 
+ items: The collected test items. + """ + if config.getoption("--run-integration"): + return + skip_marker = pytest.mark.skip(reason="pass --run-integration to run integration tests") + for item in items: + if "integration" in item.keywords: + item.add_marker(skip_marker) diff --git a/docs/howtos/index.md b/docs/howtos/index.md deleted file mode 100644 index 24a8d17..0000000 --- a/docs/howtos/index.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: How Tos -summary: Recipes for common tasks using Foreman. -date: 2026-04-14T16:51:58.665341+00:00 ---- - -The *How To's* are intended as recipes for common tasks using Foreman. -See the API reference for detailed reference documentation and function, class etc definitions. - -!!! warning - - How Tos not implemented yet! - -How-to guides are directions that take the reader through the steps required to solve a real-world problem. -How-to guides are **goal-oriented.** - -[More information](https://diataxis.fr/how-to-guides/) diff --git a/docs/howtos/write-an-agent.md b/docs/howtos/write-an-agent.md new file mode 100644 index 0000000..45c9ff7 --- /dev/null +++ b/docs/howtos/write-an-agent.md @@ -0,0 +1,229 @@ +--- +title: Write an Agent +summary: How to build a Foreman-compatible agent using the foreman-client SDK. +date: 2026-05-04 +--- + +# Write an Agent + +This guide walks you through building a Foreman-compatible agent from scratch. +Agents are HTTP services that receive task nudges from the harness, claim the task from the queue, process it, +and report a decision back — all via `ForemanClient`. + +## Prerequisites + +- Python 3.12+ +- A running Foreman harness (see [Installation](../tutorials/installation.md)) +- `uv` or `pip` for package management + +## Install `foreman-client` + +```bash +uv add foreman-client +# or +pip install foreman-client +``` + +`foreman-client` has two runtime dependencies: `httpx` and `pydantic>=2`. + +## The Three-Method API + +`ForemanClient` exposes exactly three methods an agent needs. 
+ +### `ForemanClient(harness_url, agent_url)` + +| Argument | Type | Description | +|---------------|-------|-----------------------------------------------------------------------------------| +| `harness_url` | `str` | Base URL of the Foreman harness (e.g. `"http://localhost:8000"`). | +| `agent_url` | `str` | This agent's own base URL (e.g. `"http://localhost:9001"`). Sent when claiming tasks so the harness knows which agent holds each claim. | + +Use it as a context manager to ensure the HTTP connection pool is closed on exit: + +```python +with ForemanClient(harness_url="http://localhost:8000", agent_url="http://localhost:9001") as client: + ... +``` + +### `next_task() → TaskMessage | None` + +Claims and returns the next pending task from the harness queue. +Returns `None` when the queue is empty (harness responds `204 No Content`). +Raises `ForemanClientError` on any non-2xx response. + +```python +task = client.next_task() +if task is None: + return # nothing to do +``` + +### `complete_task(task_id, decision)` + +Stores the completed `DecisionMessage` in the queue and wakes the harness drain loop. +Call this once per task, after all processing is done. + +| Argument | Type | Description | +|------------|-------------------|------------------------------------------------------------| +| `task_id` | `str` | The `task_id` from the `TaskMessage` returned by `next_task()`. | +| `decision` | `DecisionMessage` | Your agent's decision, rationale, and action list. | + +> **Note:** Always pass `decision.task_id` as the `task_id` argument. +> Passing a different value causes the nudge and the stored decision to reference different tasks; +> the harness will not raise an error, but the drain loop will not find the intended result. 
+ +```python +from foremanclient import DecisionMessage, DecisionType + +decision = DecisionMessage( + task_id=task.task_id, + decision=DecisionType.label_and_respond, + rationale="Classified as a bug based on the stack trace.", + actions=[{"type": "add_label", "label": "bug"}], +) +client.complete_task(task.task_id, decision) +``` + +### `heartbeat(task_id)` + +Extends the claim window for an in-progress task. +The harness defaults to a 300-second claim timeout (`claim_timeout_seconds` in `QueueConfig`). +If your agent hasn't called `complete_task()` within that window, the harness re-queues the task for another attempt. + +**Call `heartbeat()` at least once every 30 seconds** during long LLM calls or any blocking work. + +```python +import threading + +def _heartbeat_loop(client, task_id, stop_event): + while not stop_event.wait(timeout=25): + client.heartbeat(task_id) + +stop = threading.Event() +t = threading.Thread(target=_heartbeat_loop, args=(client, task.task_id, stop), daemon=True) +t.start() +try: + decision = run_llm(task) +finally: + stop.set() +``` + +## Idempotency + +`task_id` is the idempotency key for every task. +The harness writes each decision to `action_log` before executing GitHub API calls, keyed on `task_id`. + +If `next_task()` returns a task your agent has already completed +(for example, after an unclean restart), check your own records before processing again: + +```python +task = client.next_task() +if task and not already_processed(task.task_id): + decision = process(task) + client.complete_task(task.task_id, decision) +``` + +The simplest approach is to keep a short in-memory set of recently completed `task_id` values. +Across restarts, rely on the harness: if the decision is already in `action_log`, the executor skips duplicate actions. + +## Minimal Working Example + +A complete, runnable agent in under 35 lines. 
+The lifespan ensures the client is created once and that any tasks queued +while the agent was down are claimed immediately on startup (see [Startup Poll](#startup-poll) for why this matters): + +```python +import os +from contextlib import asynccontextmanager +from fastapi import BackgroundTasks, FastAPI +from foremanclient import DecisionMessage, DecisionType, ForemanClient +from pydantic import BaseModel + +def _decide(task): + return DecisionMessage( + task_id=task.task_id, decision=DecisionType.skip, rationale="No action needed." + ) + +def _run(client): + task = client.next_task() + if task: + client.complete_task(task.task_id, _decide(task)) + +@asynccontextmanager +async def lifespan(app): + client = ForemanClient(os.environ["FOREMAN_HARNESS_URL"], os.environ["AGENT_URL"]) + # Drain any tasks queued while the agent was down + while True: + task = client.next_task() + if task is None: + break + client.complete_task(task.task_id, _decide(task)) + app.state.client = client + yield + client.close() + +app = FastAPI(lifespan=lifespan) + +class TaskNudge(BaseModel): + task_id: str + +@app.get("/health") +def health(): + return {"status": "ok"} + +@app.post("/task", status_code=202) +async def handle_task(nudge: TaskNudge, background_tasks: BackgroundTasks): + background_tasks.add_task(_run, app.state.client) + return {"status": "accepted"} +``` + +Run it with: + +```bash +FOREMAN_HARNESS_URL=http://localhost:8000 AGENT_URL=http://localhost:9001 uvicorn myagent:app --port 9001 +``` + +## Required Endpoints + +Every agent **must** expose: + +| Method | Path | Description | +|--------|-----------|------------------------------------------------------------------| +| `POST` | `/task` | Accept a nudge `{"task_id": "..."}` and return `202 Accepted`. | +| `GET` | `/health` | Health check. Must return `200 OK` with `{"status": "ok"}`. | + +The harness sends a `POST /task` nudge (body: `{"task_id": "..."}`) when a new task is enqueued. 
+The agent should return 202 immediately and process the task in a background thread or task. + +## Startup Poll + +On startup, loop `next_task()` until it returns `None` to pick up all tasks that were enqueued +while your agent was down: + +```python +from contextlib import asynccontextmanager + +@asynccontextmanager +async def lifespan(app): + client = ForemanClient(...) + while True: + task = client.next_task() + if task is None: + break + _decide_and_complete(task) + yield + client.close() + +app = FastAPI(lifespan=lifespan) +``` + +A single `next_task()` call only claims one task — if N tasks accumulated while the agent was offline, +N-1 remain permanently stuck until the harness requeue cycle fires (up to `claim_timeout_seconds` later). +Looping until `None` is the correct pattern. + +This is the key mechanism for zero task loss under agent restarts. +The harness re-queues stale claimed tasks after `claim_timeout_seconds`, +and the startup drain ensures your agent claims them immediately on boot. + +## Reference + +See the [Agent Protocol Reference](../reference/agent-protocol.md) for the full `TaskMessage`, `DecisionMessage`, +and `ActionItem` schemas. diff --git a/docs/specs/02-messaging-update/SPEC.md b/docs/specs/02-messaging-update/SPEC.md new file mode 100644 index 0000000..325f67f --- /dev/null +++ b/docs/specs/02-messaging-update/SPEC.md @@ -0,0 +1,370 @@ +# Spec: Queue-Mediated Agent Protocol + +**Status:** Draft **Branch:** `message-update-idea` **Replaces:** Synchronous `POST /task → DecisionMessage` dispatch in +`server.py` + +--- + +## 1. Objective + +Replace the current fire-and-forget synchronous HTTP dispatch with a durable, +queue-mediated protocol so that GitHub events are never silently dropped — even when agents are temporarily unavailable +(restarts, cold starts) or permanently down (misconfigured, crashed). + +**Target users:** + +- **Harness operators** — install Foreman, configure repos; gain reliability without extra ops. 
+- **Agent authors** — build new agents; use `foreman-client` instead of implementing + queue management themselves. + +**MVP acceptance criterion:** Zero task loss under a simulated agent restart. +A task enqueued while the agent is down must be delivered and processed once the agent comes back online. + +## 2. How It Works + +### 2.1 Data Flow (Happy Path) + +```text +GitHub event + → poller.py detects event + → queue.py: INSERT task (status=pending) into queue.db + → Dispatcher nudges agent: POST /task → 202 Accepted (fire-and-forget) + → Agent receives nudge (or polls on startup/interval) + → foreman-client: next_task() → SELECT + UPDATE status=claimed + → Agent processes task, calls complete_task(task_id, decision) + → foreman-client: UPDATE status=completed, result= + → Agent nudges harness: POST /harness/result → 202 Accepted + → Harness drain loop picks up completed task + → executor.py executes actions + → memory.py logs decision and writes summary + → queue.py: UPDATE status=done +``` + +### 2.2 Resilience Paths + +| Scenario | Recovery mechanism | +|-----------------------------------|---------------------------------------------------------------------------------| +| Agent down when nudge sent | Background poll interval on agent startup | +| Agent crashes after claiming task | Harness re-enqueues tasks claimed but not completed within claim timeout | +| Harness misses nudge from agent | Background drain loop polls for completed tasks on a fixed interval | +| Agent processes same task twice | `task_id` is the idempotency key; executor checks `action_log` before executing | + +### 2.3 Claim Timeout and Heartbeat + +- **Claim timeout** (configurable, default **5 minutes**): If a task is claimed but + not completed within this window, the harness re-enqueues it (status → pending, + retry_count incremented). 
+- **Heartbeat interval** (recommendation for agent authors: **every 30 seconds**): + Agents doing long LLM calls must call `client.heartbeat(task_id)` at least + once per 30 seconds to reset the claim timeout clock. + The `foreman-client` library will document this requirement prominently. + +## 3. New Components + +### 3.1 `queue.db` — Task Queue Database + +A **separate** SQLite database from `memory.db`, stored alongside it +(default: `~/.agent-harness/queue.db`, path overridable in config). +WAL mode enabled at connection time. + +**Schema — `task_queue` table:** + +```sql +CREATE TABLE task_queue ( + task_id TEXT PRIMARY KEY, + agent_url TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'pending', + -- pending | claimed | completed | done | failed + payload TEXT NOT NULL, -- JSON-serialised TaskMessage + created_at REAL NOT NULL, -- Unix timestamp + claimed_at REAL, + completed_at REAL, + result TEXT, -- JSON-serialised DecisionMessage + retry_count INTEGER NOT NULL DEFAULT 0, + last_heartbeat REAL -- updated by heartbeat() +); +CREATE INDEX idx_task_queue_status ON task_queue (status, agent_url); +``` + +**Status lifecycle:** + +```text +pending → claimed → completed → done + ↘ (timeout) ↗ + pending (retry) + ↘ (max retries exceeded) + failed +``` + +Max retries: configurable, default **3**. + +### 3.2 `foreman/queue.py` — Harness Queue Module + +Owns all SQLite access for the task queue. +Public interface: + +```python +class TaskQueue: + def __init__(self, db_path: Path, claim_timeout_seconds: int = 300) -> None: ... + + def enqueue(self, task: TaskMessage, agent_url: str) -> None: ... + """Insert a new task with status=pending.""" + + def claim_next(self, agent_url: str) -> TaskMessage | None: ... + """Claim the oldest pending task for agent_url; returns None if queue empty.""" + + def complete(self, task_id: str, decision: DecisionMessage) -> None: ... 
+ """Mark task completed and store the DecisionMessage result.""" + + def heartbeat(self, task_id: str) -> None: ... + """Reset last_heartbeat to now, extending the claim window.""" + + def drain_completed(self) -> list[tuple[TaskMessage, DecisionMessage]]: ... + """Return all completed tasks and mark them done. Called by the harness drain loop.""" + + def requeue_stale(self) -> int: ... + """Re-enqueue tasks claimed but not heartbeated within claim_timeout. Returns count.""" + + def fail_exhausted(self, max_retries: int = 3) -> int: ... + """Mark tasks exceeding max_retries as failed. Returns count.""" +``` + +### 3.3 `foreman-client` — Separate PyPI Package + +A thin Python library installed by agent authors. +Lives at `foreman-client/` in this repo (separate `pyproject.toml`). +Published to PyPI as `foreman-client`. + +**Directory structure:** + +```text +foreman-client/ +├── pyproject.toml +└── foremanclient/ + ├── __init__.py + ├── client.py # ForemanClient + └── models.py # Re-exported Pydantic models (TaskMessage, DecisionMessage) +``` + +**Public API:** + +```python +class ForemanClient: + def __init__(self, harness_url: str, agent_url: str) -> None: ... + """ + Args: + harness_url: Base URL of the Foreman harness (e.g. "http://localhost:8000"). + agent_url: This agent's own base URL (used to filter tasks from the queue). + """ + + def next_task(self) -> TaskMessage | None: ... + """Claim and return the next pending task, or None if the queue is empty.""" + + def complete_task(self, task_id: str, decision: DecisionMessage) -> None: ... + """Write the decision result and nudge the harness via POST /harness/result.""" + + def heartbeat(self, task_id: str) -> None: ... + """Reset the claim timeout clock. Call every ~30 seconds during long LLM calls.""" +``` + +Agent authors interact **only** with these three methods. +They do not manage queue connections, retries, or status transitions directly. 
+ +### 3.4 Modified Harness Endpoints + +**`POST /task` (agent-facing, harness → agent nudge)** + +```text +Request body: {"task_id": ""} # optional hint; agent should poll queue regardless +Response: 202 Accepted # always; delivery is queue's job, not HTTP's +``` + +The agent's FastAPI app continues to expose `POST /task`. +The handler now calls `client.next_task()` and processes the result asynchronously, returning 202 immediately. + +**`POST /harness/result` (new harness endpoint, agent → harness nudge)** + +```text +Request body: {"task_id": ""} +Response: 202 Accepted +``` + +Added in `foreman/routers/result.py`. +On receipt, the harness drain loop is triggered immediately (in addition to its background schedule). + +### 3.5 Harness Background Tasks + +Two background asyncio tasks started in the FastAPI lifespan: + +| Task | Interval | Action | +|----------------|------------------|-------------------------------------------------------| +| `drain_loop` | Every 10 seconds | `drain_completed()` → execute actions → update memory | +| `requeue_loop` | Every 60 seconds | `requeue_stale()` + `fail_exhausted()` | + +Intervals are configurable in `config.yaml` under a new `queue:` section. + +--- + +## 4. Configuration Changes + +New `queue:` section in `config.yaml` (and corresponding Pydantic model in `config.py`): + +```yaml +queue: + db_path: ~/.agent-harness/queue.db # optional; defaults to alongside memory.db + claim_timeout_seconds: 300 # default 5 minutes + max_retries: 3 + drain_interval_seconds: 10 + requeue_interval_seconds: 60 +``` + +## 5. 
Protocol Changes and Migration + +### 5.1 What Changes + +| Component | Before | After | +|----------------------------|----------------------------------------------------------------|---------------------------------------------------| +| `Dispatcher.dispatch()` | Synchronous POST; waits for `DecisionMessage` in response body | Enqueues task; sends nudge; returns immediately | +| Agent `POST /task` handler | Processes task synchronously; returns `DecisionMessage` (200) | Returns 202 immediately; processes task via queue | +| `DecisionMessage` delivery | HTTP response body | Written to `task_queue.result` column | + +### 5.2 Explicit Removal: Synchronous Dispatch Path + +The synchronous dispatch path in `Dispatcher.dispatch()` (`server.py:63–147`) is **removed entirely** in this change. +There is no fallback to synchronous HTTP. +Queue-first is the only delivery mechanism. + +Rationale: two delivery paths means neither is authoritative. +Commit fully to the queue to avoid split-brain between what the queue thinks happened and what HTTP delivered. + +**Migration steps (in implementation order):** + +1. Implement `foreman/queue.py` and `queue.db` schema. +2. Implement `foreman-client` package with tests. +3. Add `POST /harness/result` endpoint to harness. +4. Refactor `Dispatcher.dispatch()` to enqueue + nudge. +5. Add drain and requeue background loops to harness lifespan. +6. Update the reference agent (`agents/issue-triage/agent.py`) to use `ForemanClient`. +7. Delete the synchronous response-parsing block from `Dispatcher.dispatch()`. +8. Remove `response.status_code != 200` error handling (no longer applicable). + +## 6. Project Structure After Change + +```text +foreman/ +├── queue.py # NEW: TaskQueue class (queue.db access) +├── routers/ +│ └── result.py # NEW: POST /harness/result nudge endpoint +├── server.py # MODIFIED: Dispatcher uses queue; adds background loops +├── config.py # MODIFIED: QueueConfig Pydantic model added +└── ... 
(unchanged) + +foreman-client/ # NEW: separate package +├── pyproject.toml +└── foremanclient/ + ├── __init__.py + ├── client.py + └── models.py + +agents/issue-triage/ +└── agent.py # MODIFIED: uses ForemanClient instead of sync response + +docs/ +└── specs/02-messaging-update/ + ├── idea.md + ├── SPEC.md # this file + └── plan.md # to be created + +docs/how-to/ +└── write-an-agent.md # NEW: agent author guide (task for plan phase) +``` + +## 7. Code Style + +Inherits all project conventions from `CLAUDE.md`: + +- **Formatter/linter:** ruff (line length 119, Google docstring convention) +- **Type checking:** mypy (`--no-strict-optional --ignore-missing-imports`) +- **Docstrings:** interrogate (≥90% coverage), pydoclint (Google style) +- **Type hints:** required on all public functions and methods +- **Python minimum:** 3.12 +- **`foreman-client`** follows the same conventions; its `pyproject.toml` + mirrors the tooling configuration from the main project. + +## 8. Testing Strategy + +### 8.1 `foreman/queue.py` + +- Use a real temp-file SQLite DB via `pytest tmp_path` (never mock SQLite). +- Test each status transition: pending → claimed → completed → done. +- Test `requeue_stale()`: claim a task, advance time past timeout, verify re-enqueue. +- Test `fail_exhausted()`: exhaust retries, verify status=failed. +- Test concurrent claim (two threads calling `claim_next()` simultaneously) — + only one should receive the task. + +### 8.2 `foreman-client` + +- Unit tests with a mock harness server (use `httpx.MockTransport` or `respx`). +- Test `next_task()` when queue empty returns `None`. +- Test `complete_task()` sends nudge to `POST /harness/result`. +- Test `heartbeat()` updates `last_heartbeat`. + +### 8.3 `Dispatcher` (harness) + +- Mock `TaskQueue` at the boundary; verify `enqueue()` is called with correct + `TaskMessage` and `agent_url`. +- Verify nudge HTTP POST is fire-and-forget (does not block on agent response). 
+- Test drain loop: mock `drain_completed()` returning tasks; verify `executor.execute()` + is called and memory is updated. +- Test requeue loop: verify `requeue_stale()` and `fail_exhausted()` are called. + +### 8.4 Integration Test + +- Spin up the harness and the reference agent (`agents/issue-triage/`) locally. +- Send a GitHub event to the harness poller. +- Stop the agent container immediately after task is enqueued. +- Restart the agent container. +- Assert the task was claimed and completed (inspect `task_queue` status=done). +- Assert the `action_log` has the expected decision entry. +- **This test is the primary acceptance gate for the MVP criterion.** + +### 8.5 Coverage Target + +≥85% line / ≥80% branch for `foreman/queue.py` and `foremanclient/client.py`. + +## 9. Boundaries + +### Always Do + +- Enqueue every event before any dispatch attempt; the queue is the source of truth. +- Write every decision to `action_log` before executing actions (existing invariant, preserved). +- Use WAL mode for `queue.db`; open with `check_same_thread=False`. +- Log structured events for every status transition (enqueue, claim, complete, requeue, fail). + +### Ask First (Require Explicit Config) + +- `allow_close: true` — closing issues (unchanged from current behavior). +- `max_retries` changes beyond the default — operators must set this deliberately. + +### Never Do + +- Store raw secrets in `queue.db` or task payloads (GitHub tokens must not appear in `payload`). +- Execute shell commands or arbitrary code from task payloads. +- Let agent containers access `queue.db` directly — all queue I/O goes through + `foreman-client` ↔ harness API (the harness owns the database file). +- Add a synchronous dispatch fallback path. +- Expose `GET /queue/status` in MVP — structured log output only. + +## 10. Out of Scope (MVP) + +- Multiple agent containers per queue (no consumer groups). 
+- External queue backends (Redis, NATS) — pluggable interface defined, SQLite only implemented. +- Task prioritization or ordering beyond FIFO. +- Monitoring UI. +- `GET /queue/status` operator endpoint. + +## 11. Documentation Task + +The plan phase must include a task to produce `docs/how-to/write-an-agent.md` covering the `foreman-client` API, +heartbeat requirements, idempotency contract, and a minimal agent example using `ForemanClient`. +This doc is the primary reference for agent authors. diff --git a/docs/specs/02-messaging-update/idea.md b/docs/specs/02-messaging-update/idea.md index 646d921..fd96d7c 100644 --- a/docs/specs/02-messaging-update/idea.md +++ b/docs/specs/02-messaging-update/idea.md @@ -1,6 +1,79 @@ -# Messaging update +# Messaging Update: Queue-Mediated Agent Protocol ## Problem Statement -The current wire protocol doesn't handle the case where a message is sent to a node that is not connected. -This leads to missed events with no way to recover. +How might we ensure GitHub events dispatched to agent containers are processed reliably, +even when agents are temporarily unavailable? + +## Recommended Direction + +The harness owns a task queue (SQLite by default, pluggable interface). +Events are enqueued before any dispatch attempt — the queue is the source of truth. +`POST /task → 202 Accepted` becomes a nudge ("check your queue now"), not a delivery mechanism. +Agents poll the queue at startup, on a background interval, and when nudged. + +Results flow symmetrically: the agent writes its DecisionMessage back to the queue, +then POSTs to `POST /harness/result → 202 Accepted` to nudge the harness. +The harness also has a background task that periodically checks for completed tasks. +HTTP nudges are optimizations that degrade gracefully — the queue always wins. + +This preserves the core constraint: the harness owns all infrastructure. +Agents embed a thin `foreman-client` library that handles queue I/O. 
+Agent authors call `client.next_task()` and `client.complete_task(task_id, decision)`. +They don't implement queue management. + +## Key Assumptions to Validate + +- [ ] SQLite with WAL mode handles concurrent harness writes + agent reads + without contention — benchmark before committing the schema +- [ ] Agents are Python (or can embed a Python client) — validate the agent + container build process supports a shared library dependency +- [ ] 202 nudge + background poll provides acceptable end-to-end latency — + define "acceptable" explicitly (target: < 30s for MVP) +- [ ] One agent per queue is sufficient for MVP — the queue abstraction must + not bake in single-consumer assumptions that block future fan-out + +## MVP Scope + +**In:** + +- `task_queue` table in existing `memory.db`: task_id, agent_url, status, + payload, created_at, claimed_at, completed_at, result, retry_count +- Harness writes: enqueue on poll event; `POST /task → 202` nudge to agent; + `POST /result` endpoint for agent callback; background drain loop for + completed tasks; re-enqueue tasks claimed but not completed within timeout +- Harness reads: poll queue for completed tasks on callback + interval +- `foreman-client` lib: `next_task()`, `complete_task(task_id, decision)`, + `heartbeat(task_id)` — heartbeat resets the claim timeout clock +- Agent protocol: `POST /task → 202` (nudge only); startup queue poll; + configurable background poll interval +- Delivery guarantee: at-least-once; task_id is the idempotency key + +**Out:** + +- Multiple agent containers per queue (no consumer groups in MVP) +- External queue backends (Redis, NATS) — define pluggable interface, + implement SQLite only +- Task prioritization or ordering beyond FIFO +- Monitoring UI — structured log output only + +## Not Doing (and Why) + +- **Agent-owned queues** — every agent author would reimplement queue logic; + harness owns infrastructure +- **Exactly-once delivery** — requires distributed coordination; at-least-once 
+  plus idempotency is sufficient and far simpler
+- **File-system queuing** — ephemeral in containers; shared volumes add
+  deployment surface for no real gain over SQLite
+- **Keep synchronous dispatch as fallback** — two delivery paths means neither
+  is authoritative; commit to queue-first fully
+
+## Open Questions
+
+- What is the claim timeout?
+  If an agent pulls a task and crashes before completing, the harness must detect and re-enqueue it —
+  define the TTL and re-enqueue logic before writing the schema.
+- Is `foreman-client` a separate PyPI package, part of the `foreman` package,
+  or vendored into each agent at build time?
+- Should `GET /queue/status` be exposed on the harness for operator visibility,
+  or is structured logging sufficient for MVP?
diff --git a/docs/specs/02-messaging-update/phase-3-fixes.md b/docs/specs/02-messaging-update/phase-3-fixes.md
new file mode 100644
index 0000000..46dd152
--- /dev/null
+++ b/docs/specs/02-messaging-update/phase-3-fixes.md
@@ -0,0 +1,77 @@
+# Phase 3 Review — Remaining Fixes
+
+Status of all items from `phase-3-review.md` as of 2026-05-05.
+
+---
+
+## Already Resolved
+
+| # | Finding | Where fixed |
+|---|---------|-------------|
+| 1 | `httpx.Client` never closed | `client.py` — `close()`, `__enter__`, `__exit__` added; lifecycle tests in `TestForemanClientLifecycle` |
+| 2 | `LLMBackendRef` / `TaskContext` not exported | `__init__.py` — both added to imports and `__all__` |
+| 3 | `import json` inside test methods | `test_client.py:5` — moved to module-level |
+| 4 | Misleading ordering comment | Removed; `test_sends_decision_then_nudge` now asserts both routes called |
+
+---
+
+## Remaining Work
+
+### Task A — Add configurable `timeout` to `ForemanClient`
+
+**Priority:** Low (suggestion from review finding #6)
+
+**Files:** `foreman-client/foremanclient/client.py`, `foreman-client/tests/test_client.py`
+
+**What to do:**
+
+1. Add `timeout: float = 5.0` parameter to `ForemanClient.__init__`.
+2. Pass it to `httpx.Client(base_url=harness_url, timeout=timeout)`.
+3. Update the class docstring Args section to document the new parameter.
+4. Add one test in `TestForemanClientLifecycle` verifying that the timeout value is
+   forwarded to the underlying `httpx.Client`.
+
+**Acceptance criteria:**
+
+- [x] `ForemanClient("http://h", "http://a", timeout=10.0)` constructs without error.
+- [x] `httpx.Client` is initialised with the supplied timeout value.
+- [x] Default behaviour (no `timeout` arg) is unchanged — uses 5.0 s.
+- [x] Docstring documents the parameter.
+- [x] New test passes; full test suite passes; pre-commit clean.
+
+---
+
+### Task B — Document `task_id` / `decision.task_id` identity in API docs
+
+**Priority:** Low (spec design note from review finding #5)
+
+**File:** `docs/how-to/write-an-agent.md`
+
+**What to do:**
+
+Add a short callout under the `complete_task(task_id, decision)` section noting
+that `task_id` must equal `decision.task_id`.
+The current example already uses `task.task_id` for both arguments,
+but a reader constructing a `DecisionMessage` independently might pass mismatched values silently.
+
+Suggested addition (after the parameter table):
+
+```text
+> **Note:** Always pass `decision.task_id` as the `task_id` argument.
+> Passing a different value causes the nudge and the stored decision to
+> reference different tasks; the harness will not raise an error, but
+> the drain loop will not find the intended result.
+```
+
+**Acceptance criteria:**
+
+- [x] Callout appears in the `complete_task` section.
+- [x] The existing code examples are unchanged.
+- [x] pre-commit clean.
+
+---
+
+## Suggested order
+
+Run Task A first (code change + test), then Task B (docs).
+Both are small and can be done in a single commit or as two separate commits.
diff --git a/docs/specs/02-messaging-update/phase-3-review.md b/docs/specs/02-messaging-update/phase-3-review.md new file mode 100644 index 0000000..348d594 --- /dev/null +++ b/docs/specs/02-messaging-update/phase-3-review.md @@ -0,0 +1,102 @@ +# Phase 3 Code Review — `foreman-client` + +Reviewed commit `adffcef` (Phase 3 implementation). +Files: `foreman-client/foremanclient/client.py`, `models.py`, `__init__.py`, `tests/test_client.py`. + +--- + +## Findings + +### Important — Fix before Phase 4 + +#### 1. `httpx.Client` is never closed (`client.py:51`) + +`ForemanClient.__init__` creates `self._http = httpx.Client(base_url=harness_url)` but the class has no `close()` method +and no context manager support. +The `httpx.Client` holds a connection pool; in long-running agent services this leaks file descriptors. + +**Fix:** add `close()` and `__enter__`/`__exit__`: + +```python +def close(self) -> None: + """Close the underlying HTTP connection pool.""" + self._http.close() + +def __enter__(self) -> ForemanClient: + return self + +def __exit__(self, *_: object) -> None: + self.close() +``` + +#### 2. `LLMBackendRef` and `TaskContext` not exported (`__init__.py:4`) + +Both types appear as nested fields of `TaskMessage`. +Agent authors constructing `TaskMessage` instances in unit tests need them. +They should be in `__all__` alongside `TaskMessage`. + +**Fix:** add to `__init__.py`: + +```python +from foremanclient.models import ( + ActionItem, DecisionMessage, DecisionType, + LLMBackendRef, TaskContext, TaskMessage, +) + +__all__ = [ + ..., + "LLMBackendRef", + "TaskContext", +] +``` + +--- + +### Suggestions — Lower priority + +#### 3. `import json` inside test methods (`test_client.py:119, 133, 185`) + +`import json` appears inside three test method bodies. +Move to module-level imports. + +#### 4. 
Misleading ordering comment in test (`test_client.py:105-107`) + +The comment "Verify /queue/complete was called first" is followed by assertions that only check requests are not None — +not actual call order. +Either remove the comment or verify ordering via `respx.calls` timestamp/index. + +#### 5. Redundant `task_id` param on `complete_task` (spec design note) + +`complete_task(task_id, decision)` — `task_id` is already in `decision.task_id`. +The nudge uses the positional `task_id` while the complete body uses `decision.model_dump()`. +If a caller passes mismatched values, both calls proceed with different IDs silently. +This matches the spec's stated API, so it is a spec-level design concern rather than a code bug. + +Consider for Phase 6 (API docs): note that callers should always pass `decision.task_id` as `task_id`. + +#### 6. No configurable timeout on `httpx.Client` (`client.py:51`) + +Defaults to httpx's 5-second connect + read timeout. +For heartbeat callers, a failing harness causes a 5-second block. +Exposing a `timeout: float = 5.0` constructor parameter would let agent authors tune this. 
+ +--- + +## Axes Verdict + +| Axis | Result | Notes | +|---|---|---| +| Correctness | Pass (with caveats) | Resource leak and missing exports are the gaps | +| Readability | Pass | Minor import style issue in tests | +| Architecture | Pass | Clean standalone package; correct HTTP contract | +| Security | Pass | No SSRF risk; no secrets in logs | +| Performance | Pass | Connection pooling correct; timeout note is minor | + +--- + +## Status + +- [ ] Fix `httpx.Client` lifecycle (close + context manager) — **before Phase 4 merge** +- [ ] Export `LLMBackendRef` and `TaskContext` — **before Phase 4 merge** +- [ ] Move `import json` to module level in tests — anytime +- [ ] Remove misleading ordering comment — anytime diff --git a/docs/specs/02-messaging-update/plan.md b/docs/specs/02-messaging-update/plan.md new file mode 100644 index 0000000..7c80f61 --- /dev/null +++ b/docs/specs/02-messaging-update/plan.md @@ -0,0 +1,640 @@ +# Implementation Plan: Queue-Mediated Agent Protocol + +## Overview + +Replace the synchronous `POST /task → DecisionMessage` dispatch in `server.py` with a durable, SQLite-backed task queue. +Events are enqueued before any dispatch attempt; agents claim tasks via HTTP; +the harness drains completed tasks on a background loop. +Zero task loss under an agent restart is the MVP acceptance criterion. + +## Architecture Decisions + +- **Config-first:** Add `QueueConfig` to `config.py` before writing `TaskQueue` — the timeout + and retry defaults flow from config into every other component. +- **Harness owns the database:** Agents never touch `queue.db` directly. + All queue I/O goes through HTTP endpoints on the harness. + `foreman-client` wraps these calls. +- **Three new harness endpoints:** `POST /queue/next` (claim), `POST /queue/complete` + (store result), `POST /queue/heartbeat` (extend claim window); plus `POST /harness/result` (drain nudge). 
+ Only `/harness/result` is specified in the spec; + the other three are the implicit contract required by `ForemanClient.next_task()` / `complete_task()` / `heartbeat()`. +- **`complete_task()` does two things:** stores the `DecisionMessage` in the queue DB *and* + sends `POST /harness/result` to nudge the drain loop — so agent authors call only one method. +- **Delete the synchronous path entirely in Phase 4:** no fallback, no feature flag. + +## Open Questions (resolve before Phase 4) + +- What HTTP status code should `POST /queue/next` return when the queue is empty — + `204 No Content` or `200` with a `null` body? + (Plan assumes `204`.) +- Should `POST /queue/complete` accept a standalone `DecisionMessage`, or a wrapper `{task_id, decision}`? + (Plan assumes the full `DecisionMessage` as the body, since it already carries `task_id`.) + +## Task List + +### Phase 1: Configuration and Queue Foundation + +#### Task 1: Add `QueueConfig` to `config.py` + +**Description:** Extend `ForemanConfig` with a new optional `queue: QueueConfig` section. +Mirror the pattern used for `PollingConfig` — a Pydantic model with typed fields and defaults, +added as an optional field on `ForemanConfig`. +Update `config.example.yaml` with the new section (commented out, showing defaults). 
+ +**Acceptance criteria:** + +- [x] `QueueConfig` model exists with fields: `db_path: Path | None`, `claim_timeout_seconds: int = 300`, + `max_retries: int = 3`, `drain_interval_seconds: int = 10`, `requeue_interval_seconds: int = 60` +- [x] `ForemanConfig.queue` defaults to a zero-config `QueueConfig()` when the section is absent +- [x] `${VAR}` references in `db_path` resolve correctly (inherits `_resolve_refs_in`) +- [x] Existing config tests still pass + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_config.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** None + +**Files likely touched:** + +- `foreman/config.py` +- `config.example.yaml` +- `tests/test_config.py` + +**Estimated scope:** S + +#### Task 2: Implement `foreman/queue.py` — `TaskQueue` + +**Description:** Create `foreman/queue.py` with the `TaskQueue` class and `queue.db` schema. +Follow the exact patterns from `memory.py`: `PRAGMA journal_mode=WAL`, `check_same_thread=False`, +`executescript` for DDL, no ORM. +Implement all six public methods from the spec. + +The `claim_next()` method must use a single `UPDATE … RETURNING` +or a `SELECT … FOR UPDATE` workaround to be concurrency-safe under multiple simultaneous callers +(SQLite serialises writes, so `BEGIN IMMEDIATE` + `SELECT` + `UPDATE` in a single transaction is sufficient). 
+ +**Acceptance criteria:** + +- [x] `queue.db` schema matches spec (§3.1): `task_queue` table with all columns + index +- [x] `enqueue()` inserts with `status=pending` +- [x] `claim_next()` atomically claims oldest pending task for the given `agent_url`; returns `None` when empty +- [x] `complete()` sets `status=completed` and stores the serialised `DecisionMessage` +- [x] `heartbeat()` updates `last_heartbeat` +- [x] `drain_completed()` returns all `completed` rows and sets them to `done` +- [x] `requeue_stale()` re-enqueues `claimed` tasks past the claim timeout; increments `retry_count` +- [x] `fail_exhausted()` marks tasks with `retry_count >= max_retries` as `failed` +- [x] DB file and parent directories are auto-created (matching `MemoryStore` behaviour) + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_queue.py` (written in Task 3) +- [x] `pre-commit run --all-files` + +**Dependencies:** Task 1 + +**Files likely touched:** + +- `foreman/queue.py` (new) + +**Estimated scope:** M + +#### Task 3: Tests for `TaskQueue` + +**Description:** Write `tests/test_queue.py` covering all `TaskQueue` methods. +Use a real temp-file SQLite DB via `pytest tmp_path` — never mock SQLite. +Use `freezegun` or manual timestamp manipulation to test timeout-based behaviour. 
+ +**Acceptance criteria:** + +- [x] Schema creation: `task_queue` table and index exist after init +- [x] `enqueue` + `claim_next` happy path: task round-trips correctly +- [x] `claim_next` returns `None` on empty queue +- [x] `complete` + `drain_completed`: completed task is returned and marked `done` +- [x] `requeue_stale`: task claimed but not heartbeated past timeout → re-enqueued, `retry_count` incremented +- [x] `fail_exhausted`: task at `max_retries` → `status=failed` +- [x] Concurrent claim: two threads call `claim_next()` simultaneously; only one receives the task +- [x] Coverage ≥85% line / ≥80% branch for `foreman/queue.py` + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_queue.py --cov=foreman/queue.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Task 2 + +**Files likely touched:** + +- `tests/test_queue.py` (new) + +**Estimated scope:** M + +### Checkpoint: Phase 1 + +- [x] `uv run pytest --agent-digest=term` — all tests pass +- [x] `pre-commit run --all-files` — clean +- [x] `TaskQueue` is fully exercised; concurrent-claim test passes +- [x] Human review before proceeding + +### Phase 2: Harness Queue API Endpoints + +#### Task 4: Queue HTTP endpoints — `foreman/routers/queue.py` + +**Description:** Add three new harness endpoints that `ForemanClient` will call. +Follow the existing router pattern (`foreman/routers/health.py`). +The router receives a `TaskQueue` instance via FastAPI dependency injection (use `app.state.task_queue`). 
+
+| Endpoint | Body | Response |
+|-------------------------|------------------------|---------------------------------------|
+| `POST /queue/next` | `{"agent_url": "..."}` | `TaskMessage` (200) or 204 No Content |
+| `POST /queue/complete` | `DecisionMessage` JSON | 202 Accepted |
+| `POST /queue/heartbeat` | `{"task_id": "..."}` | 202 Accepted |
+
+`POST /queue/complete` calls `TaskQueue.complete()` then immediately triggers the drain loop
+(same signal mechanism used by `POST /harness/result`).
+
+**Acceptance criteria:**
+
+- [x] `POST /queue/next` returns 200 + `TaskMessage` JSON when a task is available
+- [x] `POST /queue/next` returns 204 when the queue is empty
+- [x] `POST /queue/complete` stores the decision and returns 202
+- [x] `POST /queue/heartbeat` updates `last_heartbeat` and returns 202
+- [x] All endpoints respond immediately — `/queue/complete` and `/queue/heartbeat` with 202 (no blocking on downstream work)
+- [x] Router is included in `app` (registered in `server.py`)
+
+**Verification:**
+
+- [x] `uv run pytest --agent-digest=term tests/test_queue_router.py` (written in Task 6)
+- [x] `pre-commit run --all-files`
+
+**Dependencies:** Tasks 2, 3
+
+**Files likely touched:**
+
+- `foreman/routers/queue.py` (new)
+- `foreman/server.py` (register router, expose `task_queue` on `app.state`)
+
+**Estimated scope:** M
+
+#### Task 5: `POST /harness/result` endpoint — `foreman/routers/result.py`
+
+**Description:** Add the agent-nudge endpoint from spec §3.4.
+On receipt, it triggers the drain loop immediately (in addition to its background schedule).
+The trigger mechanism is an `asyncio.Event` awaited by the drain loop and cleared after each drain;
+`POST /harness/result` sets the event.
+ +**Acceptance criteria:** + +- [x] `POST /harness/result` accepts `{"task_id": ""}` and returns 202 Accepted +- [x] Receiving the nudge triggers the drain loop event (verified by inspecting `app.state`) +- [x] Router is included in `app` + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_result_router.py` (written in Task 6) +- [x] `pre-commit run --all-files` + +**Dependencies:** Task 4 + +**Files likely touched:** + +- `foreman/routers/result.py` (new) +- `foreman/server.py` (register router) + +**Estimated scope:** S + +#### Task 6: Tests for harness queue endpoints + +**Description:** Write `tests/test_queue_router.py` and `tests/test_result_router.py` using FastAPI's `TestClient`. +Mock `TaskQueue` at the boundary (not SQLite — the queue is already tested in Task 3). +Verify HTTP contracts only. + +**Acceptance criteria:** + +- [x] `POST /queue/next` — 200 with task body when queue has a task +- [x] `POST /queue/next` — 204 when `claim_next()` returns `None` +- [x] `POST /queue/complete` — 202; `TaskQueue.complete()` called with correct args +- [x] `POST /queue/heartbeat` — 202; `TaskQueue.heartbeat()` called with correct `task_id` +- [x] `POST /harness/result` — 202; drain event is set + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_queue_router.py tests/test_result_router.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Tasks 4, 5 + +**Files likely touched:** + +- `tests/test_queue_router.py` (new) +- `tests/test_result_router.py` (new) + +**Estimated scope:** M + +### Checkpoint: Phase 2 + +- [x] `uv run pytest --agent-digest=term` — all tests pass +- [x] All three queue endpoints + `/harness/result` exist and return correct status codes +- [x] Human review before proceeding + +### Phase 3: `foreman-client` Package + +#### Task 7: Scaffold `foreman-client` package + `models.py` + +**Description:** Create the `foreman-client/` directory tree with its own `pyproject.toml` +(mirroring the main 
project's tooling: ruff, mypy, interrogate, pydoclint). +Add `models.py` that re-exports `TaskMessage` and `DecisionMessage` from `foreman.protocol` — or, +since `foreman-client` must be installable independently, +copy the minimal Pydantic models into `foremanclient/models.py` (no dependency on the `foreman` package). + +**Acceptance criteria:** + +- [x] Directory structure matches spec §3.3 +- [x] `foremanclient/models.py` defines `TaskMessage` and `DecisionMessage` as standalone + Pydantic models (no `foreman.*` imports) +- [x] `pyproject.toml` has `httpx` and `pydantic>=2` as runtime deps; dev deps mirror main project +- [x] `uv sync` inside `foreman-client/` succeeds +- [x] `pre-commit run --all-files` passes inside `foreman-client/` + +**Verification:** + +- [x] `cd foreman-client && uv sync && pre-commit run --all-files` + +**Dependencies:** Tasks 4, 5 (need to know the HTTP contract) + +**Files likely touched:** + +- `foreman-client/pyproject.toml` (new) +- `foreman-client/foremanclient/__init__.py` (new) +- `foreman-client/foremanclient/models.py` (new) + +**Estimated scope:** S + +#### Task 8: Implement `ForemanClient` in `foremanclient/client.py` + +**Description:** Implement the three public methods using `httpx`. +All HTTP calls are synchronous (no `asyncio` in the client — agent authors control their own async if needed). + +- `next_task()` → `POST /queue/next` → parse `TaskMessage` or return `None` on 204 +- `complete_task(task_id, decision)` → `POST /queue/complete` (stores decision) then + `POST /harness/result` (nudges drain) +- `heartbeat(task_id)` → `POST /queue/heartbeat` + +Log structured events for each call using `structlog` +(already a dep in the main project; add it to `foreman-client` as well). 
+ +**Acceptance criteria:** + +- [x] `next_task()` returns a `TaskMessage` on 200, `None` on 204 +- [x] `complete_task()` sends decision to `/queue/complete` then sends nudge to `/harness/result` +- [x] `heartbeat()` sends `{"task_id": ...}` to `/queue/heartbeat` +- [x] All methods raise `ForemanClientError` (a custom exception) on non-2xx responses +- [x] All public methods and the class have Google-style docstrings (pydoclint passes) +- [x] Type hints on all public methods + +**Verification:** + +- [x] `uv run pytest --agent-digest=term` inside `foreman-client/` (tests written in Task 9) +- [x] `pre-commit run --all-files` inside `foreman-client/` + +**Dependencies:** Task 7 + +**Files likely touched:** + +- `foreman-client/foremanclient/client.py` (new) +- `foreman-client/foremanclient/__init__.py` (update exports) + +**Estimated scope:** M + +#### Task 9: Tests for `foremanclient` + +**Description:** Write `foreman-client/tests/test_client.py` using `respx` +(or `httpx.MockTransport`) to mock the harness HTTP endpoints. +Never spin up a real harness. 
+ +**Acceptance criteria:** + +- [x] `next_task()` returns `TaskMessage` when harness returns 200 + JSON +- [x] `next_task()` returns `None` when harness returns 204 +- [x] `complete_task()` sends `DecisionMessage` JSON to `/queue/complete` then nudge to `/harness/result` +- [x] `heartbeat()` sends `{"task_id": ...}` to `/queue/heartbeat` +- [x] `ForemanClientError` raised on 4xx/5xx responses +- [x] Coverage ≥85% line / ≥80% branch for `foremanclient/client.py` + +**Verification:** + +- [x] `cd foreman-client && uv run pytest --agent-digest=term --cov=foremanclient/client.py` +- [x] `pre-commit run --all-files` inside `foreman-client/` + +**Dependencies:** Task 8 + +**Files likely touched:** + +- `foreman-client/tests/__init__.py` (new) +- `foreman-client/tests/test_client.py` (new) + +**Estimated scope:** M + +### Checkpoint: Phase 3 + +- [x] `foreman-client` tests pass with ≥85% line coverage +- [x] `pre-commit run --all-files` passes in both `foreman-client/` and root +- [x] Human review of `ForemanClient` public API before proceeding (API is the contract agent + authors depend on — changes after this point are breaking) + +### Phase 4: Dispatcher Refactor and Background Loops + +#### Task 10: Refactor `Dispatcher.dispatch()` to enqueue + nudge + +**Description:** Replace the synchronous HTTP POST in `Dispatcher.dispatch()` with: + +1. `task_queue.enqueue(task, agent_url=route_target.url)` +2. Fire-and-forget `POST /task` nudge to the agent (body: `{"task_id": task.task_id}`) + using `httpx.AsyncClient` with a short timeout (5 s); log and continue on failure. + +Remove the synchronous response-parsing block +(lines 118–147 in current `server.py`), +the `response.status_code != 200` check, and `DecisionMessage` parsing from this method. +The method now returns immediately after the nudge. + +The `Dispatcher` constructor gains a `task_queue: TaskQueue` parameter. 
+ +**Acceptance criteria:** + +- [x] `dispatch()` calls `task_queue.enqueue()` with correct `TaskMessage` and `agent_url` +- [x] `dispatch()` sends `POST /task` with body `{"task_id": ...}` and returns 202 +- [x] `dispatch()` does not await agent response or parse `DecisionMessage` +- [x] Nudge HTTP errors are logged and swallowed (fire-and-forget) +- [x] All synchronous response-parsing code is deleted +- [x] `Dispatcher.__init__` accepts `task_queue: TaskQueue` + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_server.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Tasks 2, 6 + +**Files likely touched:** + +- `foreman/server.py` +- `tests/test_server.py` (update existing tests) + +**Estimated scope:** M + +#### Task 11: Add drain and requeue background loops to FastAPI lifespan + +**Description:** Add a FastAPI lifespan context manager to `server.py` that starts two background `asyncio` tasks: + +| Task | Interval | Action | +|----------------|-------------------------------------------------|-----------------------------------------------------------------------------------------------------| +| `drain_loop` | `queue.drain_interval_seconds` (default 10 s) | `drain_completed()` → `executor.execute()` → `memory.upsert_memory_summary()` → `queue.mark_done()` | +| `requeue_loop` | `queue.requeue_interval_seconds` (default 60 s) | `requeue_stale()` + `fail_exhausted()` | + +The drain loop also wakes immediately when `POST /harness/result` sets the drain `asyncio.Event` +(the event is stored on `app.state.drain_event`). + +Both tasks are cancelled cleanly on shutdown. 
+ +**Acceptance criteria:** + +- [x] `drain_loop` calls `drain_completed()` and passes each `(TaskMessage, DecisionMessage)` to + `executor.execute()` and `memory.upsert_memory_summary()` +- [x] `drain_loop` wakes immediately when `drain_event` is set +- [x] `requeue_loop` calls `requeue_stale()` and `fail_exhausted(max_retries=config.queue.max_retries)` +- [x] Both tasks log structured events on each cycle +- [x] Both tasks are cancelled without error on SIGINT/shutdown + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_server.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Task 10 + +**Files likely touched:** + +- `foreman/server.py` +- `tests/test_server.py` + +**Estimated scope:** M + +#### Task 12: Wire `TaskQueue` into `__main__.py` + +**Description:** Update `_run_start()` +and `_run_loop()` in `__main__.py` to construct a `TaskQueue` from `config.queue`, pass it to `Dispatcher`, +and attach it to `app.state` so the router dependencies can access it. +Add `--queue-db` CLI argument (overrides `config.queue.db_path`). + +**Acceptance criteria:** + +- [x] `TaskQueue` is constructed with the resolved `db_path` and `claim_timeout_seconds` +- [x] `Dispatcher` receives the `task_queue` instance +- [x] `app.state.task_queue` and `app.state.drain_event` are set before the server starts +- [x] Default `db_path` is `~/.agent-harness/queue.db` when not set in config +- [x] Existing `--db` arg for `memory.db` is unchanged + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_main.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Tasks 10, 11 + +**Files likely touched:** + +- `foreman/__main__.py` +- `tests/test_main.py` + +**Estimated scope:** S + +#### Task 13: Tests for updated `Dispatcher` and background loops + +**Description:** Update and extend `tests/test_server.py`. +Mock `TaskQueue` at the boundary (not SQLite). 
+Test the drain loop by injecting a mocked `drain_completed()` return and verifying `executor.execute()` is called. + +**Acceptance criteria:** + +- [x] `dispatch()` test: `enqueue()` called with correct task + agent_url; nudge POST is fire-and-forget +- [x] `dispatch()` test: nudge HTTP error is swallowed and logged; no exception propagated +- [x] Drain loop test: `drain_completed()` returning one task → `executor.execute()` called once +- [x] Drain loop test: `drain_event` set → drain loop wakes immediately +- [x] Requeue loop test: `requeue_stale()` and `fail_exhausted()` called on schedule +- [x] No test directly touches `queue.db` + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_server.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Tasks 10, 11, 12 + +**Files likely touched:** + +- `tests/test_server.py` + +**Estimated scope:** M + +### Checkpoint: Phase 4 + +- [x] `uv run pytest --agent-digest=term` — all tests pass +- [x] Synchronous dispatch path is fully deleted from `server.py` +- [x] `pre-commit run --all-files` — clean +- [ ] Human review before proceeding + +### Phase 5: Agent Update + +#### Task 14: Update reference agent to use `ForemanClient` + +**Description:** Rewrite `agents/issue-triage/issue_triage/agent.py` to use `ForemanClient`. +The `POST /task` endpoint now accepts `{"task_id": ""}`, returns 202 immediately, +and fires an asyncio background task that calls `client.next_task()`, processes it, and calls `client.complete_task()`. + +Remove the inline `TaskMessage` / `DecisionMessage` model definitions (they came from `foremanclient.models`). +Add `foreman-client` as a runtime dependency in the agent's `pyproject.toml`. + +Add a startup poll: on `@app.on_event("startup")` +(or lifespan), call `client.next_task()` to pick up any tasks queued while the agent was down. 
+ +**Acceptance criteria:** + +- [x] `POST /task` returns 202 Accepted immediately (not 200 + body) +- [x] Background task calls `client.next_task()` and `client.complete_task()` +- [x] Startup poll calls `client.next_task()` once on boot +- [x] Agent no longer defines its own `TaskMessage` / `DecisionMessage` models +- [x] `foreman-client` appears in `agents/issue-triage/pyproject.toml` dependencies +- [x] `GET /health` is unchanged + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_agent_server.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Tasks 8, 9 + +**Files likely touched:** + +- `agents/issue-triage/issue_triage/agent.py` +- `agents/issue-triage/pyproject.toml` + +**Estimated scope:** M + +#### Task 15: Tests for updated reference agent + +**Description:** Update `tests/test_agent_server.py` to reflect the new 202 response +and mock `ForemanClient` at the boundary. +Test startup poll behaviour. + +**Acceptance criteria:** + +- [x] `POST /task` returns 202 (not 200) +- [x] Background task is triggered; `client.next_task()` and `client.complete_task()` called +- [x] `client.next_task()` returning `None` does not crash the background task +- [x] Startup poll fires `client.next_task()` once on lifespan start + +**Verification:** + +- [x] `uv run pytest --agent-digest=term tests/test_agent_server.py` +- [x] `pre-commit run --all-files` + +**Dependencies:** Task 14 + +**Files likely touched:** + +- `tests/test_agent_server.py` + +**Estimated scope:** S + +### Checkpoint: Phase 5 + +- [x] `uv run pytest --agent-digest=term` — full suite passes (261 tests) +- [x] Reference agent uses `ForemanClient`; no inline protocol models remain +- [x] Human review before proceeding + +### Phase 6: Documentation and Integration + +#### Task 16: Write `docs/how-to/write-an-agent.md` + +**Description:** Agent author guide covering: installing `foreman-client`, the three-method API +(`next_task`, `complete_task`, `heartbeat`), +heartbeat 
requirements (every 30 s during long LLM calls), idempotency contract +(`task_id` as idempotency key), and a minimal working example using `ForemanClient`. + +**Acceptance criteria:** + +- [x] Covers: install, `ForemanClient.__init__` args, `next_task()`, `complete_task()`, `heartbeat()` +- [x] Explains claim timeout and heartbeat cadence requirement +- [x] Explains idempotency: what to do if `next_task()` returns an already-processed task +- [x] Includes a ≤30-line end-to-end example agent using `ForemanClient` +- [x] Doc is in `docs/howtos/write-an-agent.md` (project uses `howtos/` convention) + +**Verification:** + +- [x] Human reads and approves the draft + +**Dependencies:** Tasks 8, 14 + +**Files likely touched:** + +- `docs/howtos/write-an-agent.md` (new) + +**Estimated scope:** S + +#### Task 17: Integration test — agent restart resilience + +**Description:** Write `tests/test_integration.py` +(extend existing file) +with a test that satisfies the MVP acceptance criterion: zero task loss under a simulated agent restart. + +Use real local processes (not mocks): spin up the harness and the reference agent, enqueue a task, stop the agent, +restart it, assert the task reaches `status=done` in `queue.db`. 
+ +**Acceptance criteria:** + +- [x] Test spins up harness (subprocess or `TestClient` + real `TaskQueue`) +- [x] GitHub poller event is injected (mock the poller, call `dispatcher.dispatch()` directly) +- [x] Agent is stopped immediately after task is enqueued (before it can claim) +- [x] Agent is restarted; startup poll picks up the pending task +- [x] `task_queue` row reaches `status=done` +- [x] `action_log` has an entry for the decision +- [x] Test is marked `@pytest.mark.integration` and skipped in CI unless `--run-integration` flag is set + +**Verification:** + +- [x] `uv run pytest --agent-digest=term -m integration --run-integration tests/test_integration.py` +- [x] Human observes the test pass end-to-end + +**Dependencies:** Tasks 12, 14 + +**Files likely touched:** + +- `tests/test_integration.py` +- `conftest.py` (add `--run-integration` flag if not present) + +**Estimated scope:** L + +### Checkpoint: Phase 6 (Final) + +- [x] `uv run pytest --agent-digest=term` — full unit suite passes (261 + 1 skipped) +- [x] Integration test passes manually + (`uv run pytest --run-integration tests/test_integration.py::TestAgentRestartResilience`) +- [x] `pre-commit run --all-files` — clean +- [x] `docs/how-to/write-an-agent.md` approved +- [x] No synchronous dispatch path exists anywhere in the codebase +- [x] Human sign-off before merge + +## Risks and Mitigations + +| Risk | Impact | Mitigation | +|----------------------------------------------------------|--------|-------------------------------------------------------------------------------------------------------------------------------------------------------| +| SQLite concurrency under concurrent claim | High | Use `BEGIN IMMEDIATE` transaction in `claim_next()` — SQLite serialises writes, preventing double-claim | +| `foreman-client` endpoint contract diverges from harness | High | Define request/response Pydantic models in `foreman/routers/queue.py` and reference them in `foremanclient/models.py` (or keep 
them in sync manually) | +| Drain loop misses a completed task | Medium | Background poll every 10 s is the safety net; `/harness/result` nudge is the fast path | +| Agent processes same task twice after restart | Medium | `task_id` idempotency key in `action_log` (existing invariant, preserved) | +| `foreman-client` is sync but agent is async | Low | `httpx` supports both sync and async; document that authors should use `asyncio.to_thread()` if calling from async context | + +## Out of Scope (MVP) + +Per spec §10: multiple agents per queue, external backends, prioritization, monitoring UI, `GET /queue/status`. diff --git a/docs/specs/02-messaging-update/pr-21-fixes.md b/docs/specs/02-messaging-update/pr-21-fixes.md new file mode 100644 index 0000000..f6d59bd --- /dev/null +++ b/docs/specs/02-messaging-update/pr-21-fixes.md @@ -0,0 +1,162 @@ +# PR #21 Fix Plan + +Implements all seven findings from `pr-21-review.md`. +Tasks are ordered: high-priority blockers first, then medium, then low. + +--- + +## Task 1 — Wrap `_drain_loop` body in exception handlers + +**File:** `foreman/server.py:131-149` **Priority:** High (must fix before merge) + +Wrap the outer `drain_completed()` call in a broad `try/except` that logs and continues so the loop never dies. +Wrap each per-task `executor.execute()` + `memory.upsert_memory_summary()` block in a separate inner `try/except` +so one bad task does not abort the others. +Also extend the `_lifespan` finally block's `contextlib.suppress` to include `Exception` +so a previously-crashed drain task does not re-raise during shutdown. 
+ +**Acceptance criteria:** + +- [x] `_drain_loop` has an outer `try/except Exception` around `drain_completed()` that calls `logger.exception` + and continues the loop +- [x] `_drain_loop` has an inner `try/except Exception` around `executor.execute()` + `memory.upsert_memory_summary()` per + task +- [x] `_lifespan` finally block uses `contextlib.suppress(asyncio.CancelledError, Exception)` for `drain_task` +- [x] New test: a drain-loop iteration that raises inside `executor.execute` does not kill the loop + (subsequent iteration still runs) +- [x] Full test suite passes + +--- + +## Task 2 — Split `drain_completed` / add `mark_done`; transition `done` after execute + +**Files:** `foreman/queue.py:179-183`, `foreman/server.py:138-146` **Priority:** High (must fix before merge) + +Remove the `UPDATE … done` + `commit` from `drain_completed` so it only reads rows. +Add a new `mark_done(task_id: str) -> None` method that transitions a single row to `done` and commits. +In `_drain_loop`, call `task_queue.mark_done(task.task_id)` after `executor.execute()` succeeds. + +This gives at-least-once delivery: a crash between `execute` and `mark_done` causes re-drain on next start, +which matches the stated design goal. + +Note: `drain_completed` tests in `test_queue.py` that assert `status == 'done'` after the call must be updated — +`drain_completed` now leaves status as `completed`; `mark_done` transitions to `done`. 
+ +**Acceptance criteria:** + +- [x] `drain_completed` no longer contains any `UPDATE` or `commit` call +- [x] `mark_done(task_id)` method exists on `TaskQueue`, transitions one row `completed → done`, commits +- [x] `_drain_loop` calls `task_queue.mark_done(task.task_id)` inside the per-task try block, + after `memory.upsert_memory_summary()` +- [x] Existing `drain_completed` tests updated to reflect that rows remain `completed` after the call +- [x] New test: `mark_done` transitions the correct row to `done` and leaves other rows untouched +- [x] New test: executor failure leaves the task in `completed` state (not `done`) +- [x] Full test suite passes + +--- + +## Task 3 — Drain all queued tasks on agent startup (loop until empty) + +**File:** `agents/issue-triage/issue_triage/agent.py:82` **Priority:** Medium (should fix) + +Replace the single `await _poll_and_process(client)` call in `_lifespan` with a loop +that calls `client.next_task()` repeatedly until it returns `None`, processing each task before moving to the next. + +**Acceptance criteria:** + +- [x] `_lifespan` startup poll loops: + `while True: task = await asyncio.to_thread(client.next_task); if task is None: break; await _process_task(client, task)` +- [x] New integration test: 3 tasks enqueued while agent is "down"; agent restart claims and processes all 3 + (no stuck `pending` rows) +- [x] Full test suite passes + +--- + +## Task 4 — Wrap `_requeue_loop` body in exception handler + +**File:** `foreman/server.py:163-168` **Priority:** Medium (should fix) + +Mirror the fix from Task 1: wrap the `requeue_stale()` + `fail_exhausted()` block in `try/except Exception` with +`logger.exception`. +Also extend `_lifespan` finally block's `contextlib.suppress` for `requeue_task` to include `Exception`. 
+ +**Acceptance criteria:** + +- [x] `_requeue_loop` has `try/except Exception` around `requeue_stale()` + `fail_exhausted()` that logs and continues +- [x] `_lifespan` finally block uses `contextlib.suppress(asyncio.CancelledError, Exception)` for `requeue_task` +- [x] New test: a requeue-loop iteration that raises does not kill the loop +- [x] Full test suite passes + +--- + +## Task 5 — Remove private-attribute access across module boundary + +**Files:** `foreman/server.py`, `foreman/__main__.py:167` **Priority:** Low (nice to have) + +Rename `Dispatcher._executor` to `Dispatcher.executor` (public). +Update `__main__.py` to use `dispatcher.executor`. + +**Acceptance criteria:** + +- [x] `Dispatcher.__init__` assigns `self.executor` (not `self._executor`) +- [x] `__main__.py` references `dispatcher.executor` +- [x] No remaining references to `dispatcher._executor` in the codebase +- [x] Full test suite passes + +--- + +## Task 6 — Add heartbeat thread to reference agent `_process_task` + +**File:** `agents/issue-triage/issue_triage/agent.py:52-60` **Priority:** Low (nice to have) + +Wrap the `asyncio.to_thread(triage, task)` call with a daemon heartbeat thread +that fires `client.heartbeat(task.task_id)` every 25 seconds until the triage call finishes. 
+ +**Acceptance criteria:** + +- [x] `_process_task` starts a daemon `threading.Thread` that calls `client.heartbeat(task.task_id)` every 25 s +- [x] The heartbeat thread is stopped (via `threading.Event`) in a `finally` block after `triage` returns or raises +- [x] `import threading` added to `agent.py` +- [x] Full test suite passes + +--- + +## Task 7 — Update minimal working example in docs to include startup poll + +**File:** `docs/howtos/write-an-agent.md:125-156` **Priority:** Low (nice to have) + +Replace the module-level `ForemanClient` instantiation +and bare `FastAPI()` with a proper `@asynccontextmanager lifespan` that: creates the client, runs a startup poll +(claiming any queued tasks), yields, and closes the client. +Pass the lifespan to `FastAPI(lifespan=lifespan)`. + +**Acceptance criteria:** + +- [x] Minimal example uses `@asynccontextmanager` lifespan (import from `contextlib`) +- [x] Lifespan creates `ForemanClient`, calls `next_task()` + `complete_task()` in a startup-poll loop, yields, + calls `client.close()` +- [x] `FastAPI(lifespan=lifespan)` used instead of bare `FastAPI()` +- [x] A note is added (or the startup-poll section is cross-linked) so readers understand why the lifespan is needed +- [x] `rumdl` / pre-commit passes + +--- + +## Implementation Order + +```text +Task 2 (queue.py: split drain_completed / add mark_done) +Task 1 (server.py: wrap _drain_loop — depends on mark_done existing) +Task 4 (server.py: wrap _requeue_loop — independent, batch with Task 1 or separate) +Task 3 (agent.py: startup drain loop) +Task 5 (server.py + __main__.py: publicize executor attr) +Task 6 (agent.py: heartbeat thread) +Task 7 (docs: minimal example lifespan) +``` + +Tasks 2 and 1 are tightly coupled (Task 1's drain loop calls `mark_done`); implement them together. +Tasks 4, 5 are independent and can each be a single commit. +Tasks 6, 7 are independent documentation/agent polish. 
+ +--- + +*Plan created 2026-05-05.* *Derived from `pr-21-review.md`.* *All tasks completed 2026-05-05.* diff --git a/docs/specs/02-messaging-update/pr-21-review.md b/docs/specs/02-messaging-update/pr-21-review.md new file mode 100644 index 0000000..9d930b4 --- /dev/null +++ b/docs/specs/02-messaging-update/pr-21-review.md @@ -0,0 +1,352 @@ +# PR Review: Update task management flow and add integration tests + +## Executive Summary + +| Aspect | Value | +|--------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **PR Goal** | Implement a complete queue-mediated agent protocol: SQLite task queue, HTTP endpoints, `foreman-client` SDK, background drain/requeue loops, agent restart resilience | +| **Files Changed** | 41 (4,781 additions / 714 deletions) | +| **Risk Level** | 🟡 MEDIUM — core queue mechanics and test coverage are solid; two structural bugs in the drain pipeline need attention before shipping | +| **Review Effort** | 4/5 — six implementation phases spanning new package, background loops, agent protocol, integration test, and documentation | +| **Recommendation** | 🔄 REQUEST CHANGES | + +**Affected Areas**: `foreman/queue.py`, `foreman/server.py`, `foreman/routers/queue.py`, `foreman/routers/result.py`, +`foreman/__main__.py`, `foreman-client/`, `agents/issue-triage/issue_triage/agent.py`, `tests/test_integration.py` + +**Business Impact**: This PR enables the zero-task-loss guarantee that is the MVP acceptance criterion. +The queue, client SDK, and integration test are well-constructed. +Two bugs in the drain pipeline can silently drop GitHub actions +or permanently halt task processing on any transient error. + +**Flow Changes**: Replaces synchronous `POST→parse→execute` dispatch with durable enqueue + fire-and-forget nudge. +Decisions are now drained asynchronously by a background loop. 
+Agent startup polls the queue on boot for resilience. + +## Ratings + +| Aspect | Score | +|-----------------|-------| +| Correctness | 3/5 | +| Security | 5/5 | +| Performance | 5/5 | +| Maintainability | 4/5 | + +## PR Health + +- [x] Has clear description +- [x] References implementation plan (docs/specs/02-messaging-update/plan.md) +- [x] Appropriate size (6 phases — large but well-scoped) +- [x] Has relevant tests (261 unit tests + 1 integration test) + +--- + +## High Priority Issues + +(Must fix before merge) + +### 🐛 #1: `_drain_loop` crashes permanently on any executor or DB exception + +**Location:** `foreman/server.py:131-149` + +**Confidence:** ✅ HIGH + +The `while True` loop runs `task_queue.drain_completed()`, `executor.execute()`, +and `memory.upsert_memory_summary()` with no exception handling. +Any raise — GitHub API rate limit, bad credentials, transient SQLite I/O error — +exits `while True` and the asyncio task dies. +After that every `drain_event.set()` call from `/queue/complete` and `/harness/result` is a no-op: the loop is dead, +completed tasks pile up in the queue, and GitHub actions are never taken. +There is no alert, no restart, no visible signal to the operator. + +A second consequence: in `_lifespan`'s `finally` block, `await drain_task` re-raises any non-`CancelledError` exception. +If the drain task crashed earlier with (e.g.) `sqlite3.OperationalError`, +the re-raise propagates out of the `contextlib.suppress(asyncio.CancelledError)` guard and can disrupt clean shutdown. 
+ +```diff +# foreman/server.py — inside _drain_loop while loop + drain_event.clear() + +- pairs = task_queue.drain_completed() +- for task, decision in pairs: +- issue_number: int = task.payload.get("number", 0) +- executor.execute(decision, repo=task.repo, issue_number=issue_number, task_type=task.type) +- summary = f"decision={decision.decision.value}; rationale={decision.rationale}" +- memory.upsert_memory_summary(task.repo, issue_number, summary) +- if pairs: +- logger.info("Drain loop processed tasks", count=len(pairs)) ++ try: ++ pairs = task_queue.drain_completed() ++ for task, decision in pairs: ++ issue_number: int = task.payload.get("number", 0) ++ try: ++ executor.execute(decision, repo=task.repo, issue_number=issue_number, task_type=task.type) ++ summary = f"decision={decision.decision.value}; rationale={decision.rationale}" ++ memory.upsert_memory_summary(task.repo, issue_number, summary) ++ except Exception: ++ logger.exception("Drain loop: failed to execute task", task_id=task.task_id) ++ if pairs: ++ logger.info("Drain loop processed tasks", count=len(pairs)) ++ except Exception: ++ logger.exception("Drain loop: unexpected error, continuing") + +# foreman/server.py — _lifespan finally block +- with contextlib.suppress(asyncio.CancelledError): +- await drain_task ++ with contextlib.suppress(asyncio.CancelledError, Exception): ++ await drain_task +``` + +--- + +### 🐛 #2: `drain_completed` marks tasks `done` before executing actions — executor failures silently drop GitHub actions + +**Location:** `foreman/queue.py:179-183`, `foreman/server.py:138-146` | **Confidence:** ✅ HIGH + +`drain_completed()` atomically updates all completed rows to `done` +and commits to SQLite **before** returning the list to the caller. +`_drain_loop` then calls `executor.execute()` on those rows. 
+If the executor raises (network error, GitHub 403, etc.), the task is already `done` — it will never be retried, +and the GitHub action (add label, post comment, close issue) is silently skipped with no record in `action_log`. + +Combined with issue #1 (the loop then crashes), one bad executor call causes both action loss and drain-loop death. + +The fix is to move the `done` transition to after a successful execute, on a per-task basis: + +```diff +# foreman/queue.py — drain_completed: remove batch UPDATE/commit + def drain_completed(self) -> list[tuple[TaskMessage, DecisionMessage]]: + rows = self._conn.execute( + "SELECT task_id, payload, result FROM task_queue WHERE status = 'completed'" + ).fetchall() + if not rows: + return [] +- task_ids = [r[0] for r in rows] +- placeholders = ",".join("?" * len(task_ids)) +- self._conn.execute( +- f"UPDATE task_queue SET status = 'done' WHERE task_id IN ({placeholders})", +- task_ids, +- ) +- self._conn.commit() + return [ + (_TaskMessage.model_validate_json(payload), _DecisionMessage.model_validate_json(result)) + for _, payload, result in rows + ] + ++def mark_done(self, task_id: str) -> None: ++ """Transition a single completed task to done after its action is executed. ++ ++ Args: ++ task_id: ID of the task to mark done. 
++ """ ++ self._conn.execute("UPDATE task_queue SET status = 'done' WHERE task_id = ?", (task_id,)) ++ self._conn.commit() + +# foreman/server.py — _drain_loop: call mark_done per task, after execute succeeds + for task, decision in pairs: + issue_number: int = task.payload.get("number", 0) + executor.execute(decision, repo=task.repo, issue_number=issue_number, task_type=task.type) + summary = f"decision={decision.decision.value}; rationale={decision.rationale}" + memory.upsert_memory_summary(task.repo, issue_number, summary) ++ task_queue.mark_done(task.task_id) +``` + +Note: with this change, a process crash between `executor.execute()` +and `mark_done()` means the task is re-drained on next startup (at-least-once delivery for GitHub actions). +This is correct — it matches the stated design goal. + +--- + +## Medium Priority Issues + +(Should fix, not blocking) + +### 🐛 #3: Startup poll claims only one task — N−1 tasks queued during downtime are permanently stuck + +**Location:** `agents/issue-triage/issue_triage/agent.py:82` | **Confidence:** ✅ HIGH + +`_lifespan` calls `_poll_and_process` exactly once. +If 3 tasks accumulated while the agent was down, 1 is claimed; the other 2 remain `pending` indefinitely. +They are not `claimed`, so `requeue_stale()` never touches them. +The harness sends nudges only when new tasks are enqueued, not retroactively for pre-existing `pending` tasks. +Those tasks effectively vanish from the agent's perspective — no nudge, no retry, no failure. 
+ +```diff +# agents/issue-triage/issue_triage/agent.py — _lifespan startup poll + async def _lifespan(application: FastAPI) -> AsyncIterator[None]: + client = _get_client(application) +- await _poll_and_process(client) ++ while True: ++ task = await asyncio.to_thread(client.next_task) ++ if task is None: ++ break ++ await _process_task(client, task) + yield + client.close() +``` + +The integration test in `test_integration.py` only covers the single-task case (step 1 enqueues exactly one task). +A second test covering N>1 pending tasks would guard this path. + +--- + +### 🐛 #4: `_requeue_loop` has the same no-exception-handling problem as `_drain_loop` + +**Location:** `foreman/server.py:163-168` | **Confidence:** ✅ HIGH + +If `task_queue.requeue_stale()` or `task_queue.fail_exhausted()` raises, +the requeue loop exits `while True` and dies permanently. +Stale claimed tasks are never recycled; exhausted tasks are never failed. +The same shutdown re-raise risk applies. + +```diff +# foreman/server.py — inside _requeue_loop while loop + while True: + await asyncio.sleep(config.queue.requeue_interval_seconds) +- requeued = task_queue.requeue_stale() +- failed = task_queue.fail_exhausted(max_retries=config.queue.max_retries) +- logger.info("Requeue cycle", requeued=requeued, failed=failed) ++ try: ++ requeued = task_queue.requeue_stale() ++ failed = task_queue.fail_exhausted(max_retries=config.queue.max_retries) ++ logger.info("Requeue cycle", requeued=requeued, failed=failed) ++ except Exception: ++ logger.exception("Requeue loop: unexpected error, continuing") + +# foreman/server.py — _lifespan finally block (same fix as #1) +- with contextlib.suppress(asyncio.CancelledError): +- await requeue_task ++ with contextlib.suppress(asyncio.CancelledError, Exception): ++ await requeue_task +``` + +--- + +## Low Priority Issues + +(Nice to have) + +### 🏗️ #5: Private attribute accessed across module boundary + +**Location:** `foreman/__main__.py:167` | **Confidence:** ✅ 
HIGH + +`app.state.executor = dispatcher._executor` reaches into `Dispatcher`'s private state. +If the attribute is renamed, this silently becomes `AttributeError` at runtime +(not caught by mypy's `--ignore-missing-imports`). +Expose it via a public attribute or property. + +```diff +# foreman/server.py — Dispatcher.__init__ +- self._executor = GitHubExecutor(token=str(config.identity.github_token), memory=memory) ++ self.executor = GitHubExecutor(token=str(config.identity.github_token), memory=memory) + +# foreman/__main__.py +- app.state.executor = dispatcher._executor ++ app.state.executor = dispatcher.executor +``` + +--- + +### 🐛 #6: Reference agent has no heartbeat during LLM call + +**Location:** `agents/issue-triage/issue_triage/agent.py:52-60` | **Confidence:** ✅ HIGH + +`_process_task` runs `triage(task)` (a synchronous LLM call) via `asyncio.to_thread` with no heartbeat. +If the LLM call exceeds `claim_timeout_seconds` (default 300 s), the harness requeues the task. +The next nudge or startup poll claims it again, causing double-processing. +The docs show the heartbeat-thread pattern; the reference implementation should model it. + +```diff +# agents/issue-triage/issue_triage/agent.py ++import threading ++ + async def _process_task(client: ForemanClient, task: TaskMessage) -> None: +- decision = await asyncio.to_thread(triage, task) ++ stop = threading.Event() ++ ++ def _hb(): ++ while not stop.wait(timeout=25): ++ client.heartbeat(task.task_id) ++ ++ hb_thread = threading.Thread(target=_hb, daemon=True) ++ hb_thread.start() ++ try: ++ decision = await asyncio.to_thread(triage, task) ++ finally: ++ stop.set() + await asyncio.to_thread(client.complete_task, task.task_id, decision) +``` + +--- + +### 🎨 #7: Minimal working example in docs omits startup poll + +**Location:** `docs/howtos/write-an-agent.md:125-156` | **Confidence:** ✅ HIGH + +The 30-line "Minimal Working Example" instantiates `ForemanClient` at module level and has no lifespan. 
+A reader who copies it verbatim gets an agent without the zero-task-loss recovery path. +The startup poll section appears later but many readers won't reach it. +The example should include a minimal lifespan, +or a note should be added that the example is incomplete for production use. + +```diff +-client = ForemanClient(os.environ["FOREMAN_HARNESS_URL"], os.environ["AGENT_URL"]) +-app = FastAPI() ++from contextlib import asynccontextmanager ++ ++@asynccontextmanager ++async def lifespan(app): ++ client = ForemanClient(os.environ["FOREMAN_HARNESS_URL"], os.environ["AGENT_URL"]) ++ app.state.client = client ++ task = client.next_task() # startup poll — pick up tasks queued while down ++ if task: ++ client.complete_task(task.task_id, _decide(task)) ++ yield ++ client.close() ++ ++app = FastAPI(lifespan=lifespan) +``` + +--- + +## Flow Impact Analysis + +**Before this PR**: `Dispatcher.dispatch()` → synchronous `POST /task` → parse `DecisionMessage` → `executor.execute()` +(all in-request, blocking). + +**After this PR**: + +```text +Dispatcher.dispatch() + → task_queue.enqueue() [durable SQLite write] + → POST /task {task_id} [fire-and-forget nudge; failure is safe] + +Agent: + POST /task nudge received + → background_tasks.add_task(_poll_and_process) + → client.next_task() [POST /queue/next → claim] + → triage(task) [LLM call] + → client.complete_task() [POST /queue/complete → status=completed] + [POST /harness/result → drain_event.set()] + +_drain_loop (background): + ← drain_event wakes loop + → task_queue.drain_completed() [SELECT completed, UPDATE done — see issue #2] + → executor.execute() [GitHub API] + → memory.upsert_memory_summary() + +_requeue_loop (background, every 60 s): + → task_queue.requeue_stale() [claimed + timed-out → pending] + → task_queue.fail_exhausted() [pending + retries ≥ max → failed] +``` + +**Changed callers of `Dispatcher`**: `__main__._run_loop` +(unchanged call site; Dispatcher now requires `task_queue` arg). 
+All existing integration tests updated correctly. + +**Affected by issue #2**: `drain_completed` tests in `test_queue.py` will need updating once `mark_done` is split out. +The integration test `test_pending_task_claimed_on_restart` checks `status in ("completed", "done")` — +it will still pass after the fix since `drain_completed` no longer transitions to `done`. + +--- + +*Review conducted on PR #21 against branch `noble-cupcake` → `main`, 2026-05-04.* diff --git a/foreman-client/foremanclient/__init__.py b/foreman-client/foremanclient/__init__.py new file mode 100644 index 0000000..cf6795f --- /dev/null +++ b/foreman-client/foremanclient/__init__.py @@ -0,0 +1,15 @@ +"""Foreman client — HTTP client for the Foreman agent harness.""" + +from foremanclient.client import ForemanClient, ForemanClientError +from foremanclient.models import ActionItem, DecisionMessage, DecisionType, LLMBackendRef, TaskContext, TaskMessage + +__all__ = [ + "ActionItem", + "DecisionMessage", + "DecisionType", + "ForemanClient", + "ForemanClientError", + "LLMBackendRef", + "TaskContext", + "TaskMessage", +] diff --git a/foreman-client/foremanclient/client.py b/foreman-client/foremanclient/client.py new file mode 100644 index 0000000..7e31116 --- /dev/null +++ b/foreman-client/foremanclient/client.py @@ -0,0 +1,128 @@ +"""HTTP client for the Foreman agent harness.""" + +from __future__ import annotations + +import httpx +import structlog + +from foremanclient.models import DecisionMessage, TaskMessage + +logger = structlog.get_logger(__name__) + + +class ForemanClientError(Exception): + """Raised when the Foreman harness returns a non-2xx HTTP response. + + Args: + status_code: The HTTP status code returned by the harness. + message: A description of the error. 
+ """ + + def __init__(self, status_code: int, message: str) -> None: + self.status_code = status_code + self.message = message + super().__init__(f"HTTP {status_code}: {message}") + + +class ForemanClient: + """Synchronous HTTP client for the Foreman agent harness. + + Wraps the three queue endpoints an agent needs: claim a task, complete a + task, and send a heartbeat to extend the claim window. + + Args: + harness_url: Base URL of the running Foreman harness + (e.g. ``"http://localhost:8000"``). + agent_url: This agent's own base URL, sent when claiming tasks so + the harness knows which agent holds the claim. + timeout: HTTP request timeout in seconds applied to all requests. + Defaults to ``5.0``. + + Example: + >>> with ForemanClient( + ... harness_url="http://localhost:8000", + ... agent_url="http://localhost:9001", + ... ) as client: + ... task = client.next_task() + ... if task: + ... client.complete_task(task.task_id, decision) + """ + + def __init__(self, harness_url: str, agent_url: str, timeout: float = 5.0) -> None: + self._agent_url = agent_url + self._http = httpx.Client(base_url=harness_url, timeout=timeout) + + def close(self) -> None: + """Close the underlying HTTP connection pool.""" + self._http.close() + + def __enter__(self) -> ForemanClient: + return self + + def __exit__(self, *_: object) -> None: + self.close() + + def next_task(self) -> TaskMessage | None: + """Claim and return the next pending task from the harness queue. + + Sends ``POST /queue/next`` with this agent's URL. The harness + atomically marks the task as claimed and returns it. + + Returns: + A :class:`~foremanclient.models.TaskMessage` when a task is + available, or ``None`` when the queue is empty (HTTP 204). 
+ """ + response = self._http.post("/queue/next", json={"agent_url": self._agent_url}) + log = logger.bind(method="next_task", status_code=response.status_code) + if response.status_code == 204: + log.debug("Queue empty") + return None + if response.is_success: + task = TaskMessage.model_validate(response.json()) + log.debug("Task claimed", task_id=task.task_id) + return task + log.warning("next_task failed", body=response.text) + raise ForemanClientError(response.status_code, response.text) + + def complete_task(self, task_id: str, decision: DecisionMessage) -> None: + """Store a completed decision and nudge the harness drain loop. + + Sends ``POST /queue/complete`` with the full decision, then + ``POST /harness/result`` to wake the drain loop immediately. + + Args: + task_id: The ``task_id`` from the original + :class:`~foremanclient.models.TaskMessage`. + decision: The agent's :class:`~foremanclient.models.DecisionMessage` + to store. + """ + log = logger.bind(method="complete_task", task_id=task_id) + + complete_resp = self._http.post("/queue/complete", json=decision.model_dump(mode="json")) + if not complete_resp.is_success: + log.warning("complete_task /queue/complete failed", status_code=complete_resp.status_code) + raise ForemanClientError(complete_resp.status_code, complete_resp.text) + log.debug("Decision stored", status_code=complete_resp.status_code) + + nudge_resp = self._http.post("/harness/result", json={"task_id": task_id}) + if not nudge_resp.is_success: + log.warning("complete_task /harness/result failed", status_code=nudge_resp.status_code) + raise ForemanClientError(nudge_resp.status_code, nudge_resp.text) + log.debug("Drain nudge sent", status_code=nudge_resp.status_code) + + def heartbeat(self, task_id: str) -> None: + """Extend the claim window for an in-progress task. + + Sends ``POST /queue/heartbeat`` to reset the harness timeout clock. 
+ Agents processing long LLM calls should call this at least once every + 30 seconds to prevent the task from being re-queued. + + Args: + task_id: The ``task_id`` of the currently claimed task. + """ + response = self._http.post("/queue/heartbeat", json={"task_id": task_id}) + log = logger.bind(method="heartbeat", task_id=task_id, status_code=response.status_code) + if not response.is_success: + log.warning("heartbeat failed") + raise ForemanClientError(response.status_code, response.text) + log.debug("Heartbeat sent") diff --git a/foreman-client/foremanclient/models.py b/foreman-client/foremanclient/models.py new file mode 100644 index 0000000..bd623c8 --- /dev/null +++ b/foreman-client/foremanclient/models.py @@ -0,0 +1,87 @@ +"""Pydantic models for the Foreman harness↔agent message protocol. + +Standalone definitions — no dependency on the ``foreman`` package. +""" + +from __future__ import annotations + +from enum import Enum +from typing import Any, Optional + +from pydantic import BaseModel + + +class ActionItem(BaseModel): + """A single action to be executed by the harness after a decision. + + Extra fields are allowed to support future action types without schema changes. + """ + + model_config = {"extra": "allow"} + + type: str + """Action type identifier (e.g. ``add_label``, ``comment``).""" + + +class LLMBackendRef(BaseModel): + """Reference to the LLM backend the agent should use.""" + + provider: str + """LLM provider identifier (e.g. ``anthropic``, ``ollama``).""" + + model: str + """Model name / identifier (e.g. 
``claude-sonnet-4-6``).""" + + +class TaskContext(BaseModel): + """Context injected by the harness into each task.""" + + llm_backend: LLMBackendRef + """LLM backend the agent should use for this task.""" + + memory_summary: Optional[str] = None + """LLM-generated summary of prior actions on this issue/repo, if any.""" + + +class TaskMessage(BaseModel): + """Task sent from the harness to an agent container.""" + + task_id: str + """Unique identifier for this task (UUID4).""" + + type: str + """Task type (e.g. ``issue.triage``).""" + + repo: str + """Repository in ``owner/repo`` format.""" + + payload: dict[str, Any] + """Raw GitHub event payload.""" + + context: TaskContext + """Harness-injected context (memory summary, LLM backend).""" + + +class DecisionType(str, Enum): + """Valid agent decision values.""" + + label_and_respond = "label_and_respond" + close = "close" + escalate = "escalate" + skip = "skip" + + +class DecisionMessage(BaseModel): + """Decision returned from an agent to the harness.""" + + task_id: str + """Must match the ``task_id`` from the corresponding :class:`TaskMessage`.""" + + decision: DecisionType + """The agent's decision on how to handle the task.""" + + rationale: str + """Human-readable explanation of the decision.""" + + actions: list[ActionItem] = [] + """Ordered list of actions for the harness to execute.""" diff --git a/foreman-client/pyproject.toml b/foreman-client/pyproject.toml new file mode 100644 index 0000000..99f2288 --- /dev/null +++ b/foreman-client/pyproject.toml @@ -0,0 +1,145 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "foreman-client" +version = "0.1.0" +description = "HTTP client for the Foreman agent harness." 
+requires-python = ">=3.12" +dependencies = [ + "httpx>=0.28.1", + "pydantic>=2.0", + "structlog>=25.5.0", +] +authors = [ + { name = "Corey Oordt", email = "coreyoordt@gmail.com" }, +] + +[tool.hatch.build.targets.wheel] +packages = ["foremanclient"] + +[dependency-groups] +test = [ + "pytest", + "pytest-cov", + "pytest-mock", + "pytest-agent-digest", + "respx", + "coverage", +] + +[tool.uv] +default-groups = ["test"] + +[tool.coverage.run] +branch = true +omit = ["**/test_*.py"] +source = ["foremanclient"] + +[tool.coverage.report] +show_missing = true +exclude_lines = [ + "pragma: no cover", + "pragma: no-coverage", + "^\\s*raise AssertionError\\b", + "^\\s*raise NotImplementedError\\b", + "^\\s*return NotImplemented\\b", + "^\\s*raise$", + "^if (False|TYPE_CHECKING):", + ": \\.\\.\\.(\\s*#.*)?$", + "^ +\\.\\.\\.$", + "-> ['\"]?NoReturn['\"]?:", +] + +[tool.pytest.ini_options] +addopts = [ + "--cov=foremanclient", + "--cov-branch", + "--cov-report=term", +] +norecursedirs = [ + ".*", + "build", + "dist", + "{arch}", + "*.egg", + "venv", + "requirements*", + "lib", +] +python_files = [ + "test_*.py", + "*_test.py", + "tests.py", +] + +[tool.interrogate] +ignore-init-method = true +ignore-init-module = false +ignore-magic = true +ignore-semiprivate = false +ignore-private = false +ignore-property-decorators = false +ignore-module = false +ignore-nested-functions = true +ignore-nested-classes = true +ignore-setters = false +fail-under = 90 +exclude = ["setup.py", "docs", "build"] +ignore-regex = ["^get$", "^mock_.*", ".*BaseClass.*"] +verbose = 0 +quiet = false +whitelist-regex = [] +color = true + +[tool.ruff] +exclude = [ + ".bzr", ".direnv", ".eggs", ".git", ".hg", ".mypy_cache", ".nox", + ".pants.d", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", + "__pypackages__", "_build", "buck-out", "build", "dist", + "node_modules", "venv", +] +line-length = 119 + +[tool.ruff.lint] +preview = true +select = [ + "A", "ANN", "B", "BLE", "C4", "C90", "D", "E", "EXE", 
"F", "I", "N", + "NPY", "PD", "PERF", "PGH", "PLC", "PLE", "PLW", "Q", "RUF", "S", + "SIM", "TCH", "W", +] +ignore = [ + "ANN002", "ANN003", "ANN204", "ANN401", "D105", "D106", "D107", + "D200", "D212", "PD011", "PLC0415", "PLW0108", "PLW1641", "S101", + "S104", "S404", "S602", +] +fixable = ["ALL"] +unfixable = [] +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +typing-modules = ["typing", "types", "typing_extensions", "mypy", "mypy_extensions"] + +[tool.ruff.lint.per-file-ignores] +"tests/*" = [ + "ANN001", "ANN002", "ANN003", "ANN201", "ANN202", "PGH003", "PLR0912", + "PLR0913", "PLR0915", "PLR2004", "PLW0603", "S101", "S105", "S106", + "TRY003", "TRY201", "TRY301", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.isort] +order-by-type = true + +[tool.ruff.lint.pydocstyle] +convention = "google" + +[tool.pydoclint] +style = "google" +exclude = '\.git|tests' +require-return-section-when-returning-nothing = false +arg-type-hints-in-docstring = false +check-return-types = false +skip-checking-raises = true +quiet = true diff --git a/foreman-client/tests/__init__.py b/foreman-client/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/foreman-client/tests/test_client.py b/foreman-client/tests/test_client.py new file mode 100644 index 0000000..b5de8f3 --- /dev/null +++ b/foreman-client/tests/test_client.py @@ -0,0 +1,257 @@ +"""Tests for foremanclient/client.py — ForemanClient.""" + +from __future__ import annotations + +import json +from unittest.mock import patch + +import httpx +import pytest +import respx + +from foremanclient.client import ForemanClient, ForemanClientError +from foremanclient.models import ActionItem, DecisionMessage, DecisionType + +_HARNESS_URL = "http://harness" +_AGENT_URL = "http://agent:9001" + +_TASK_PAYLOAD: dict = { + "task_id": "task-123", + "type": "issue.triage", + "repo": "owner/repo", + "payload": {"number": 42}, + "context": { + "llm_backend": {"provider": "anthropic", 
"model": "claude-sonnet-4-6"}, + "memory_summary": None, + }, +} + +_DECISION = DecisionMessage( + task_id="task-123", + decision=DecisionType.label_and_respond, + rationale="Looks like a bug.", + actions=[ActionItem(type="add_label", label="bug")], +) + + +def _make_client() -> ForemanClient: + return ForemanClient(harness_url=_HARNESS_URL, agent_url=_AGENT_URL) + + +# --------------------------------------------------------------------------- +# next_task() +# --------------------------------------------------------------------------- + + +class TestNextTask: + """Tests for ForemanClient.next_task().""" + + @respx.mock + def test_returns_task_message_on_200(self) -> None: + """200 response with valid JSON is parsed into a TaskMessage.""" + respx.post(f"{_HARNESS_URL}/queue/next").mock(return_value=httpx.Response(200, json=_TASK_PAYLOAD)) + client = _make_client() + task = client.next_task() + assert task is not None + assert task.task_id == "task-123" + assert task.repo == "owner/repo" + + @respx.mock + def test_sends_agent_url_in_body(self) -> None: + """next_task() sends agent_url in the request body.""" + route = respx.post(f"{_HARNESS_URL}/queue/next").mock(return_value=httpx.Response(200, json=_TASK_PAYLOAD)) + _make_client().next_task() + assert route.called + assert route.calls.last.request.content == b'{"agent_url":"http://agent:9001"}' + + @respx.mock + def test_returns_none_on_204(self) -> None: + """204 No Content means the queue is empty; returns None.""" + respx.post(f"{_HARNESS_URL}/queue/next").mock(return_value=httpx.Response(204)) + assert _make_client().next_task() is None + + @respx.mock + def test_raises_on_server_error(self) -> None: + """Non-2xx response raises ForemanClientError.""" + respx.post(f"{_HARNESS_URL}/queue/next").mock(return_value=httpx.Response(500, text="Internal error")) + with pytest.raises(ForemanClientError) as exc_info: + _make_client().next_task() + assert exc_info.value.status_code == 500 + + @respx.mock + def 
test_raises_on_4xx(self) -> None: + """4xx response raises ForemanClientError.""" + respx.post(f"{_HARNESS_URL}/queue/next").mock(return_value=httpx.Response(400, text="Bad request")) + with pytest.raises(ForemanClientError) as exc_info: + _make_client().next_task() + assert exc_info.value.status_code == 400 + + +# --------------------------------------------------------------------------- +# complete_task() +# --------------------------------------------------------------------------- + + +class TestCompleteTask: + """Tests for ForemanClient.complete_task().""" + + @respx.mock + def test_sends_decision_then_nudge(self) -> None: + """complete_task() calls /queue/complete then /harness/result in order.""" + complete_route = respx.post(f"{_HARNESS_URL}/queue/complete").mock(return_value=httpx.Response(202)) + nudge_route = respx.post(f"{_HARNESS_URL}/harness/result").mock(return_value=httpx.Response(202)) + + _make_client().complete_task("task-123", _DECISION) + + assert complete_route.called + assert nudge_route.called + + @respx.mock + def test_complete_endpoint_receives_decision_json(self) -> None: + """Decision JSON is sent to /queue/complete.""" + route = respx.post(f"{_HARNESS_URL}/queue/complete").mock(return_value=httpx.Response(202)) + respx.post(f"{_HARNESS_URL}/harness/result").mock(return_value=httpx.Response(202)) + + _make_client().complete_task("task-123", _DECISION) + + parsed = json.loads(route.calls.last.request.read()) + assert parsed["task_id"] == "task-123" + assert parsed["decision"] == "label_and_respond" + + @respx.mock + def test_nudge_endpoint_receives_task_id(self) -> None: + """task_id is sent to /harness/result.""" + respx.post(f"{_HARNESS_URL}/queue/complete").mock(return_value=httpx.Response(202)) + route = respx.post(f"{_HARNESS_URL}/harness/result").mock(return_value=httpx.Response(202)) + + _make_client().complete_task("task-123", _DECISION) + + body = json.loads(route.calls.last.request.read()) + assert body["task_id"] == "task-123" 
+ + @respx.mock + def test_raises_on_complete_error(self) -> None: + """ForemanClientError raised when /queue/complete returns non-2xx.""" + respx.post(f"{_HARNESS_URL}/queue/complete").mock(return_value=httpx.Response(500, text="fail")) + + with pytest.raises(ForemanClientError) as exc_info: + _make_client().complete_task("task-123", _DECISION) + assert exc_info.value.status_code == 500 + + @respx.mock + def test_raises_on_nudge_error(self) -> None: + """ForemanClientError raised when /harness/result returns non-2xx.""" + respx.post(f"{_HARNESS_URL}/queue/complete").mock(return_value=httpx.Response(202)) + respx.post(f"{_HARNESS_URL}/harness/result").mock(return_value=httpx.Response(503, text="unavailable")) + + with pytest.raises(ForemanClientError) as exc_info: + _make_client().complete_task("task-123", _DECISION) + assert exc_info.value.status_code == 503 + + @respx.mock + def test_nudge_not_called_when_complete_fails(self) -> None: + """If /queue/complete fails, /harness/result is not called.""" + respx.post(f"{_HARNESS_URL}/queue/complete").mock(return_value=httpx.Response(500)) + nudge_route = respx.post(f"{_HARNESS_URL}/harness/result").mock(return_value=httpx.Response(202)) + + with pytest.raises(ForemanClientError): + _make_client().complete_task("task-123", _DECISION) + + assert not nudge_route.called + + +# --------------------------------------------------------------------------- +# heartbeat() +# --------------------------------------------------------------------------- + + +class TestHeartbeat: + """Tests for ForemanClient.heartbeat().""" + + @respx.mock + def test_sends_task_id(self) -> None: + """heartbeat() sends task_id in the request body.""" + route = respx.post(f"{_HARNESS_URL}/queue/heartbeat").mock(return_value=httpx.Response(202)) + + _make_client().heartbeat("task-123") + + assert route.called + body = json.loads(route.calls.last.request.read()) + assert body["task_id"] == "task-123" + + @respx.mock + def test_succeeds_on_202(self) -> 
None: + """202 response completes without error.""" + respx.post(f"{_HARNESS_URL}/queue/heartbeat").mock(return_value=httpx.Response(202)) + _make_client().heartbeat("task-123") # should not raise + + @respx.mock + def test_raises_on_error(self) -> None: + """ForemanClientError raised on non-2xx from /queue/heartbeat.""" + respx.post(f"{_HARNESS_URL}/queue/heartbeat").mock(return_value=httpx.Response(400, text="bad")) + + with pytest.raises(ForemanClientError) as exc_info: + _make_client().heartbeat("task-123") + assert exc_info.value.status_code == 400 + + +# --------------------------------------------------------------------------- +# ForemanClient lifecycle (close / context manager) +# --------------------------------------------------------------------------- + + +class TestForemanClientLifecycle: + """Tests for ForemanClient.close() and context manager support.""" + + def test_close_closes_http_client(self) -> None: + """close() delegates to the underlying httpx.Client.close().""" + client = _make_client() + with patch.object(client._http, "close") as mock_close: + client.close() + mock_close.assert_called_once() + + def test_context_manager_closes_on_exit(self) -> None: + """Exiting the context manager calls close().""" + client = _make_client() + with patch.object(client._http, "close") as mock_close: + with client: + pass + mock_close.assert_called_once() + + def test_context_manager_returns_client(self) -> None: + """__enter__ returns the client instance.""" + client = _make_client() + with patch.object(client._http, "close"): + with client as c: + assert c is client + + def test_custom_timeout_forwarded_to_httpx(self) -> None: + """timeout kwarg is forwarded to the underlying httpx.Client.""" + client = ForemanClient(harness_url=_HARNESS_URL, agent_url=_AGENT_URL, timeout=10.0) + assert client._http.timeout == httpx.Timeout(10.0) + + def test_default_timeout_is_five_seconds(self) -> None: + """Default timeout is 5.0 seconds when not specified.""" + 
client = _make_client() + assert client._http.timeout == httpx.Timeout(5.0) + + +# --------------------------------------------------------------------------- +# ForemanClientError +# --------------------------------------------------------------------------- + + +class TestForemanClientError: + """Tests for the ForemanClientError exception.""" + + def test_attributes(self) -> None: + """status_code and message are accessible as attributes.""" + err = ForemanClientError(404, "not found") + assert err.status_code == 404 + assert err.message == "not found" + + def test_str_representation(self) -> None: + """str() includes status code and message.""" + err = ForemanClientError(500, "server error") + assert "500" in str(err) + assert "server error" in str(err) diff --git a/foreman-client/uv.lock b/foreman-client/uv.lock new file mode 100644 index 0000000..d21b38c --- /dev/null +++ b/foreman-client/uv.lock @@ -0,0 +1,432 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, +] + +[[package]] +name = "certifi" +version = "2026.4.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + 
+[[package]] +name = "coverage" +version = "7.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = 
"sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = "https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url 
= "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = "https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = "2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url 
= "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = "https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = "2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, +] + +[[package]] +name = "foreman-client" +version = "0.1.0" +source = { editable = "." 
} +dependencies = [ + { name = "httpx" }, + { name = "pydantic" }, + { name = "structlog" }, +] + +[package.dev-dependencies] +test = [ + { name = "coverage" }, + { name = "pytest" }, + { name = "pytest-agent-digest" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "respx" }, +] + +[package.metadata] +requires-dist = [ + { name = "httpx", specifier = ">=0.28.1" }, + { name = "pydantic", specifier = ">=2.0" }, + { name = "structlog", specifier = ">=25.5.0" }, +] + +[package.metadata.requires-dev] +test = [ + { name = "coverage" }, + { name = "pytest" }, + { name = "pytest-agent-digest" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "respx" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "idna" +version = "3.13" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "packaging" +version = "26.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/e7a6dd94a8d4a5626c03e4e99c87f241ba9e350cd9e6d75123f992427270/packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661", size = 228134, upload-time = "2026-04-24T20:15:23.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/b2/87e62e8c3e2f4b32e5fe99e0b86d576da1312593b39f47d8ceef365e95ed/packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e", size = 100195, upload-time = "2026-04-24T20:15:22.081Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = 
"pydantic" +version = "2.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/e4/40d09941a2cebcb20609b86a559817d5b9291c49dd6f8c87e5feffbe703a/pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d", size = 844068, upload-time = "2026-04-20T14:46:43.632Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/0a/fd7d723f8f8153418fb40cf9c940e82004fce7e987026b08a68a36dd3fe7/pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927", size = 471981, upload-time = "2026-04-20T14:46:41.402Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.46.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2a/ef/f7abb56c49382a246fd2ce9c799691e3c3e7175ec74b14d99e798bcddb1a/pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c", size = 471412, upload-time = "2026-04-20T14:40:56.672Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/cb/5b47425556ecc1f3fe18ed2a0083188aa46e1dd812b06e406475b3a5d536/pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67", size = 2101946, upload-time = "2026-04-20T14:40:52.581Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/2fb62c2267cae99b815bbf4a7b9283812c88ca3153ef29f7707200f1d4e5/pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089", size = 1951612, upload-time = "2026-04-20T14:42:42.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/6e/b7348fd30d6556d132cddd5bd79f37f96f2601fe0608afac4f5fb01ec0b3/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0", size = 1977027, upload-time = "2026-04-20T14:42:02.001Z" }, + { url = "https://files.pythonhosted.org/packages/82/11/31d60ee2b45540d3fb0b29302a393dbc01cd771c473f5b5147bcd353e593/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789", size = 2063008, upload-time = "2026-04-20T14:44:17.952Z" }, + { url = "https://files.pythonhosted.org/packages/8a/db/3a9d1957181b59258f44a2300ab0f0be9d1e12d662a4f57bb31250455c52/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d", size = 2233082, upload-time = "2026-04-20T14:40:57.934Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e1/3277c38792aeb5cfb18c2f0c5785a221d9ff4e149abbe1184d53d5f72273/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c", size = 2304615, upload-time = "2026-04-20T14:42:12.584Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d5/e3d9717c9eba10855325650afd2a9cba8e607321697f18953af9d562da2f/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395", size = 2094380, upload-time = "2026-04-20T14:43:05.522Z" }, + { url = "https://files.pythonhosted.org/packages/a1/20/abac35dedcbfd66c6f0b03e4e3564511771d6c9b7ede10a362d03e110d9b/pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396", size = 2135429, upload-time = 
"2026-04-20T14:41:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a5/41bfd1df69afad71b5cf0535055bccc73022715ad362edbc124bc1e021d7/pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d", size = 2174582, upload-time = "2026-04-20T14:41:45.96Z" }, + { url = "https://files.pythonhosted.org/packages/79/65/38d86ea056b29b2b10734eb23329b7a7672ca604df4f2b6e9c02d4ee22fe/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca", size = 2187533, upload-time = "2026-04-20T14:40:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/a1129141678a2026badc539ad1dee0a71d06f54c2f06a4bd68c030ac781b/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976", size = 2332985, upload-time = "2026-04-20T14:44:13.05Z" }, + { url = "https://files.pythonhosted.org/packages/d7/60/cb26f4077719f709e54819f4e8e1d43f4091f94e285eb6bd21e1190a7b7c/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b", size = 2373670, upload-time = "2026-04-20T14:41:53.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7e/c3f21882bdf1d8d086876f81b5e296206c69c6082551d776895de7801fa0/pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4", size = 1966722, upload-time = "2026-04-20T14:44:30.588Z" }, + { url = "https://files.pythonhosted.org/packages/57/be/6b5e757b859013ebfbd7adba02f23b428f37c86dcbf78b5bb0b4ffd36e99/pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1", size = 2072970, upload-time = "2026-04-20T14:42:54.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/f8/a989b21cc75e9a32d24192ef700eea606521221a89faa40c919ce884f2b1/pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72", size = 2035963, upload-time = "2026-04-20T14:44:20.4Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3c/9b5e8eb9821936d065439c3b0fb1490ffa64163bfe7e1595985a47896073/pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37", size = 2102109, upload-time = "2026-04-20T14:41:24.219Z" }, + { url = "https://files.pythonhosted.org/packages/91/97/1c41d1f5a19f241d8069f1e249853bcce378cdb76eec8ab636d7bc426280/pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f", size = 1951820, upload-time = "2026-04-20T14:42:14.236Z" }, + { url = "https://files.pythonhosted.org/packages/30/b4/d03a7ae14571bc2b6b3c7b122441154720619afe9a336fa3a95434df5e2f/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8", size = 1977785, upload-time = "2026-04-20T14:42:31.648Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0c/4086f808834b59e3c8f1aa26df8f4b6d998cdcf354a143d18ef41529d1fe/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad", size = 2062761, upload-time = "2026-04-20T14:40:37.093Z" }, + { url = "https://files.pythonhosted.org/packages/fa/71/a649be5a5064c2df0db06e0a512c2281134ed2fcc981f52a657936a7527c/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c", size = 2232989, upload-time = "2026-04-20T14:42:59.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/84/7756e75763e810b3a710f4724441d1ecc5883b94aacb07ca71c5fb5cfb69/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f", size = 2303975, upload-time = "2026-04-20T14:41:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/6c/35/68a762e0c1e31f35fa0dac733cbd9f5b118042853698de9509c8e5bf128b/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35", size = 2095325, upload-time = "2026-04-20T14:42:47.685Z" }, + { url = "https://files.pythonhosted.org/packages/77/bf/1bf8c9a8e91836c926eae5e3e51dce009bf495a60ca56060689d3df3f340/pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687", size = 2133368, upload-time = "2026-04-20T14:41:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/e5/50/87d818d6bab915984995157ceb2380f5aac4e563dddbed6b56f0ed057aba/pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3", size = 2173908, upload-time = "2026-04-20T14:42:52.044Z" }, + { url = "https://files.pythonhosted.org/packages/91/88/a311fb306d0bd6185db41fa14ae888fb81d0baf648a761ae760d30819d33/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022", size = 2186422, upload-time = "2026-04-20T14:43:29.55Z" }, + { url = "https://files.pythonhosted.org/packages/8f/79/28fd0d81508525ab2054fef7c77a638c8b5b0afcbbaeee493cf7c3fef7e1/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23", size = 2332709, upload-time = "2026-04-20T14:42:16.134Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/21/795bf5fe5c0f379308b8ef19c50dedab2e7711dbc8d0c2acf08f1c7daa05/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7", size = 2372428, upload-time = "2026-04-20T14:41:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/45/b3/ed14c659cbe7605e3ef063077680a64680aec81eb1a04763a05190d49b7f/pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13", size = 1965601, upload-time = "2026-04-20T14:41:42.128Z" }, + { url = "https://files.pythonhosted.org/packages/ef/bb/adb70d9a762ddd002d723fbf1bd492244d37da41e3af7b74ad212609027e/pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0", size = 2071517, upload-time = "2026-04-20T14:43:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/52/eb/66faefabebfe68bd7788339c9c9127231e680b11906368c67ce112fdb47f/pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec", size = 2035802, upload-time = "2026-04-20T14:43:38.507Z" }, + { url = "https://files.pythonhosted.org/packages/7f/db/a7bcb4940183fda36022cd18ba8dd12f2dff40740ec7b58ce7457befa416/pydantic_core-2.46.3-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:afa3aa644f74e290cdede48a7b0bee37d1c35e71b05105f6b340d484af536d9b", size = 2097614, upload-time = "2026-04-20T14:44:38.374Z" }, + { url = "https://files.pythonhosted.org/packages/24/35/e4066358a22e3e99519db370494c7528f5a2aa1367370e80e27e20283543/pydantic_core-2.46.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ced3310e51aa425f7f77da8bbbb5212616655bedbe82c70944320bc1dbe5e018", size = 1951896, upload-time = "2026-04-20T14:40:53.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/92/37cf4049d1636996e4b888c05a501f40a43ff218983a551d57f9d5e14f0d/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e29908922ce9da1a30b4da490bd1d3d82c01dcfdf864d2a74aacee674d0bfa34", size = 1979314, upload-time = "2026-04-20T14:41:49.446Z" }, + { url = "https://files.pythonhosted.org/packages/d8/36/9ff4d676dfbdfb2d591cf43f3d90ded01e15b1404fd101180ed2d62a2fd3/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0c9ff69140423eea8ed2d5477df3ba037f671f5e897d206d921bc9fdc39613e7", size = 2056133, upload-time = "2026-04-20T14:42:23.574Z" }, + { url = "https://files.pythonhosted.org/packages/bc/f0/405b442a4d7ba855b06eec8b2bf9c617d43b8432d099dfdc7bf999293495/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b675ab0a0d5b1c8fdb81195dc5bcefea3f3c240871cdd7ff9a2de8aa50772eb2", size = 2228726, upload-time = "2026-04-20T14:44:22.816Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f8/65cd92dd5a0bd89ba277a98ecbfaf6fc36bbd3300973c7a4b826d6ab1391/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0087084960f209a9a4af50ecd1fb063d9ad3658c07bb81a7a53f452dacbfb2ba", size = 2301214, upload-time = "2026-04-20T14:44:48.792Z" }, + { url = "https://files.pythonhosted.org/packages/fd/86/ef96a4c6e79e7a2d0410826a68fbc0eccc0fd44aa733be199d5fcac3bb87/pydantic_core-2.46.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed42e6cc8e1b0e2b9b96e2276bad70ae625d10d6d524aed0c93de974ae029f9f", size = 2099927, upload-time = "2026-04-20T14:41:40.196Z" }, + { url = "https://files.pythonhosted.org/packages/6d/53/269caf30e0096e0a8a8f929d1982a27b3879872cca2d917d17c2f9fdf4fe/pydantic_core-2.46.3-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:f1771ce258afb3e4201e67d154edbbae712a76a6081079fe247c2f53c6322c22", size = 2128789, upload-time = 
"2026-04-20T14:41:15.868Z" }, + { url = "https://files.pythonhosted.org/packages/00/b0/1a6d9b6a587e118482910c244a1c5acf4d192604174132efd12bf0ac486f/pydantic_core-2.46.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7610b6a5242a6c736d8ad47fd5fff87fcfe8f833b281b1c409c3d6835d9227f", size = 2173815, upload-time = "2026-04-20T14:44:25.152Z" }, + { url = "https://files.pythonhosted.org/packages/87/56/e7e00d4041a7e62b5a40815590114db3b535bf3ca0bf4dca9f16cef25246/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:ff5e7783bcc5476e1db448bf268f11cb257b1c276d3e89f00b5727be86dd0127", size = 2181608, upload-time = "2026-04-20T14:41:28.933Z" }, + { url = "https://files.pythonhosted.org/packages/e8/22/4bd23c3d41f7c185d60808a1de83c76cf5aeabf792f6c636a55c3b1ec7f9/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:9d2e32edcc143bc01e95300671915d9ca052d4f745aa0a49c48d4803f8a85f2c", size = 2326968, upload-time = "2026-04-20T14:42:03.962Z" }, + { url = "https://files.pythonhosted.org/packages/24/ac/66cd45129e3915e5ade3b292cb3bc7fd537f58f8f8dbdaba6170f7cabb74/pydantic_core-2.46.3-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d83d1c6b87fa56b521479cff237e626a292f3b31b6345c15a99121b454c1", size = 2369842, upload-time = "2026-04-20T14:41:35.52Z" }, + { url = "https://files.pythonhosted.org/packages/a2/51/dd4248abb84113615473aa20d5545b7c4cd73c8644003b5259686f93996c/pydantic_core-2.46.3-cp314-cp314-win32.whl", hash = "sha256:07bc6d2a28c3adb4f7c6ae46aa4f2d2929af127f587ed44057af50bf1ce0f505", size = 1959661, upload-time = "2026-04-20T14:41:00.042Z" }, + { url = "https://files.pythonhosted.org/packages/20/eb/59980e5f1ae54a3b86372bd9f0fa373ea2d402e8cdcd3459334430f91e91/pydantic_core-2.46.3-cp314-cp314-win_amd64.whl", hash = "sha256:8940562319bc621da30714617e6a7eaa6b98c84e8c685bcdc02d7ed5e7c7c44e", size = 2071686, upload-time = "2026-04-20T14:43:16.471Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/db/1cf77e5247047dfee34bc01fa9bca134854f528c8eb053e144298893d370/pydantic_core-2.46.3-cp314-cp314-win_arm64.whl", hash = "sha256:5dcbbcf4d22210ced8f837c96db941bdb078f419543472aca5d9a0bb7cddc7df", size = 2026907, upload-time = "2026-04-20T14:43:31.732Z" }, + { url = "https://files.pythonhosted.org/packages/57/c0/b3df9f6a543276eadba0a48487b082ca1f201745329d97dbfa287034a230/pydantic_core-2.46.3-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:d0fe3dce1e836e418f912c1ad91c73357d03e556a4d286f441bf34fed2dbeecf", size = 2095047, upload-time = "2026-04-20T14:42:37.982Z" }, + { url = "https://files.pythonhosted.org/packages/66/57/886a938073b97556c168fd99e1a7305bb363cd30a6d2c76086bf0587b32a/pydantic_core-2.46.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9ce92e58abc722dac1bf835a6798a60b294e48eb0e625ec9fd994b932ac5feee", size = 1934329, upload-time = "2026-04-20T14:43:49.655Z" }, + { url = "https://files.pythonhosted.org/packages/0b/7c/b42eaa5c34b13b07ecb51da21761297a9b8eb43044c864a035999998f328/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a03e6467f0f5ab796a486146d1b887b2dc5e5f9b3288898c1b1c3ad974e53e4a", size = 1974847, upload-time = "2026-04-20T14:42:10.737Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9b/92b42db6543e7de4f99ae977101a2967b63122d4b6cf7773812da2d7d5b5/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2798b6ba041b9d70acfb9071a2ea13c8456dd1e6a5555798e41ba7b0790e329c", size = 2041742, upload-time = "2026-04-20T14:40:44.262Z" }, + { url = "https://files.pythonhosted.org/packages/0f/19/46fbe1efabb5aa2834b43b9454e70f9a83ad9c338c1291e48bdc4fecf167/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9be3e221bdc6d69abf294dcf7aff6af19c31a5cdcc8f0aa3b14be29df4bd03b1", size = 2236235, upload-time = "2026-04-20T14:41:27.307Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/da/b3f95bc009ad60ec53120f5d16c6faa8cabdbe8a20d83849a1f2b8728148/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f13936129ce841f2a5ddf6f126fea3c43cd128807b5a59588c37cf10178c2e64", size = 2282633, upload-time = "2026-04-20T14:44:33.271Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6e/401336117722e28f32fb8220df676769d28ebdf08f2f4469646d404c43a3/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28b5f2ef03416facccb1c6ef744c69793175fd27e44ef15669201601cf423acb", size = 2109679, upload-time = "2026-04-20T14:44:41.065Z" }, + { url = "https://files.pythonhosted.org/packages/fc/53/b289f9bc8756a32fe718c46f55afaeaf8d489ee18d1a1e7be1db73f42cc4/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:830d1247d77ad23852314f069e9d7ddafeec5f684baf9d7e7065ed46a049c4e6", size = 2108342, upload-time = "2026-04-20T14:42:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/10/5b/8292fc7c1f9111f1b2b7c1b0dcf1179edcd014fc3ea4517499f50b829d71/pydantic_core-2.46.3-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0793c90c1a3c74966e7975eaef3ed30ebdff3260a0f815a62a22adc17e4c01c", size = 2157208, upload-time = "2026-04-20T14:42:08.133Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9e/f80044e9ec07580f057a89fc131f78dda7a58751ddf52bbe05eaf31db50f/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:d2d0aead851b66f5245ec0c4fb2612ef457f8bbafefdf65a2bf9d6bac6140f47", size = 2167237, upload-time = "2026-04-20T14:42:25.412Z" }, + { url = "https://files.pythonhosted.org/packages/f8/84/6781a1b037f3b96be9227edbd1101f6d3946746056231bf4ac48cdff1a8d/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:2f40e4246676beb31c5ce77c38a55ca4e465c6b38d11ea1bd935420568e0b1ab", size = 2312540, upload-time = "2026-04-20T14:40:40.313Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/db/19c0839feeb728e7df03255581f198dfdf1c2aeb1e174a8420b63c5252e5/pydantic_core-2.46.3-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:cf489cf8986c543939aeee17a09c04d6ffb43bfef8ca16fcbcc5cfdcbed24dba", size = 2369556, upload-time = "2026-04-20T14:41:09.427Z" }, + { url = "https://files.pythonhosted.org/packages/e0/15/3228774cb7cd45f5f721ddf1b2242747f4eb834d0c491f0c02d606f09fed/pydantic_core-2.46.3-cp314-cp314t-win32.whl", hash = "sha256:ffe0883b56cfc05798bf994164d2b2ff03efe2d22022a2bb080f3b626176dd56", size = 1949756, upload-time = "2026-04-20T14:41:25.717Z" }, + { url = "https://files.pythonhosted.org/packages/b8/2a/c79cf53fd91e5a87e30d481809f52f9a60dd221e39de66455cf04deaad37/pydantic_core-2.46.3-cp314-cp314t-win_amd64.whl", hash = "sha256:706d9d0ce9cf4593d07270d8e9f53b161f90c57d315aeec4fb4fd7a8b10240d8", size = 2051305, upload-time = "2026-04-20T14:43:18.627Z" }, + { url = "https://files.pythonhosted.org/packages/0b/db/d8182a7f1d9343a032265aae186eb063fe26ca4c40f256b21e8da4498e89/pydantic_core-2.46.3-cp314-cp314t-win_arm64.whl", hash = "sha256:77706aeb41df6a76568434701e0917da10692da28cb69d5fb6919ce5fdb07374", size = 2026310, upload-time = "2026-04-20T14:41:01.778Z" }, + { url = "https://files.pythonhosted.org/packages/34/42/f426db557e8ab2791bc7562052299944a118655496fbff99914e564c0a94/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803", size = 2091877, upload-time = "2026-04-20T14:43:27.091Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/86a832a9d14df58e663bfdf4627dc00d3317c2bd583c4fb23390b0f04b8e/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3", size = 1932428, upload-time = "2026-04-20T14:40:45.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/1a/fe857968954d93fb78e0d4b6df5c988c74c4aaa67181c60be7cfe327c0ca/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5", size = 1997550, upload-time = "2026-04-20T14:44:02.425Z" }, + { url = "https://files.pythonhosted.org/packages/17/eb/9d89ad2d9b0ba8cd65393d434471621b98912abb10fbe1df08e480ba57b5/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4", size = 2137657, upload-time = "2026-04-20T14:42:45.149Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/0d/549bd94f1a0a402dc8cf64563a117c0f3765662e2e668477624baeec44d5/pytest-9.0.3.tar.gz", hash = "sha256:b86ada508af81d19edeb213c681b1d48246c1a91d304c6c81a427674c17eb91c", size = 1572165, upload-time = "2026-04-07T17:16:18.027Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/d4/24/a372aaf5c9b7208e7112038812994107bc65a84cd00e0354a88c2c77a617/pytest-9.0.3-py3-none-any.whl", hash = "sha256:2c5efc453d45394fdd706ade797c0a81091eccd1d6e4bccfcd476e2b8e0ab5d9", size = 375249, upload-time = "2026-04-07T17:16:16.13Z" }, +] + +[[package]] +name = "pytest-agent-digest" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/f8/3256526960418ce2108807bdb2601298f8d64b39ca69382a6e11e53fd153/pytest_agent_digest-0.3.1.tar.gz", hash = "sha256:7c3f6f886d5cccc7350409b31a46ab3a0930d93c8e5b1c4341b919ca903e1072", size = 143017, upload-time = "2026-03-21T17:47:09.525Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/85/79f4cbf7ad59b62664410d59d174e908c4e607ed099b0f8c4267fe72f4f3/pytest_agent_digest-0.3.1-py3-none-any.whl", hash = "sha256:b209b755b0e9076dec0e934a3ed583474c71c900f56d115214b9f62a1cfc1ca8", size = 9356, upload-time = "2026-03-21T17:47:08.173Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage" }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies 
= [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "respx" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/43/98/4e55c9c486404ec12373708d015ebce157966965a5ebe7f28ff2c784d41b/respx-0.23.1.tar.gz", hash = "sha256:242dcc6ce6b5b9bf621f5870c82a63997e8e82bc7c947f9ffe272b8f3dd5a780", size = 29243, upload-time = "2026-04-08T14:37:16.008Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/4a/221da6ca167db45693d8d26c7dc79ccfc978a440251bf6721c9aaf251ac0/respx-0.23.1-py2.py3-none-any.whl", hash = "sha256:b18004b029935384bccfa6d7d9d74b4ec9af73a081cc28600fffc0447f4b8c1a", size = 25557, upload-time = "2026-04-08T14:37:14.613Z" }, +] + +[[package]] +name = "structlog" +version = "25.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/52/9ba0f43b686e7f3ddfeaa78ac3af750292662284b3661e91ad5494f21dbc/structlog-25.5.0.tar.gz", hash = "sha256:098522a3bebed9153d4570c6d0288abf80a031dfdb2048d59a49e9dc2190fc98", size = 1460830, upload-time = "2025-10-27T08:28:23.028Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/45/a132b9074aa18e799b891b91ad72133c98d8042c70f6240e4c5f9dabee2f/structlog-25.5.0-py3-none-any.whl", hash = 
"sha256:a8453e9b9e636ec59bd9e79bbd4a72f025981b3ba0f5837aebf48f02f37a7f9f", size = 72510, upload-time = "2025-10-27T08:28:21.535Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] diff --git a/foreman/__main__.py b/foreman/__main__.py index 10b4a35..fdb7622 100644 --- a/foreman/__main__.py +++ b/foreman/__main__.py @@ -20,6 +20,7 @@ from foreman.containers import ContainerError, ContainerManager from foreman.memory import MemoryStore from foreman.poller import GitHubPoller +from foreman.queue import TaskQueue from 
foreman.routers import Router, RoutingError from foreman.server import Dispatcher, app @@ -30,6 +31,8 @@ #: Default memory DB path. _DEFAULT_DB_PATH = Path.home() / ".agent-harness" / "memory.db" +#: Default queue DB path. +_DEFAULT_QUEUE_DB_PATH = Path.home() / ".agent-harness" / "queue.db" def _build_parser() -> argparse.ArgumentParser: @@ -57,6 +60,12 @@ def _build_parser() -> argparse.ArgumentParser: metavar="DB_PATH", help="Path to the SQLite memory database (default: ~/.agent-harness/memory.db)", ) + start.add_argument( + "--queue-db", + default=None, + metavar="QUEUE_DB_PATH", + help="Path to the SQLite task queue database (default: ~/.agent-harness/queue.db)", + ) start.add_argument( "--host", default="0.0.0.0", @@ -144,38 +153,52 @@ def _run_start(args: Any) -> None: memory = MemoryStore(db_path) # 3. Create core components. - poller = GitHubPoller(token=config.identity.github_token, memory=memory) - dispatcher = Dispatcher(config=config, memory=memory) - - # 4. Start agent containers (if any are configured with image + port). - container_manager: ContainerManager | None = None - agent_urls: dict[str, str] = {} - agent_specs = _collect_agent_images(config) - - if agent_specs: - try: - container_manager = ContainerManager() - except ContainerError as exc: - print(f"Error: {exc}", file=sys.stderr) - sys.exit(1) - for agent_type, image, port in agent_specs: + if args.queue_db is not None: + queue_db_path = Path(args.queue_db) + elif config.queue.db_path is not None: + queue_db_path = config.queue.db_path + else: + queue_db_path = _DEFAULT_QUEUE_DB_PATH + with TaskQueue(queue_db_path, claim_timeout_seconds=config.queue.claim_timeout_seconds) as task_queue: + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) + + # Expose shared state for the lifespan background loops. 
+ app.state.task_queue = task_queue + app.state.executor = dispatcher.executor + app.state.memory = memory + app.state.config = config + + poller = GitHubPoller(token=config.identity.github_token, memory=memory) + + # 4. Start agent containers (if any are configured with image + port). + container_manager: ContainerManager | None = None + agent_urls: dict[str, str] = {} + agent_specs = _collect_agent_images(config) + + if agent_specs: try: - url = container_manager.start_agent(agent_type, image=image, port=port) - agent_urls[agent_type] = url + container_manager = ContainerManager() except ContainerError as exc: - print(f"Error starting agent '{agent_type}': {exc}", file=sys.stderr) + print(f"Error: {exc}", file=sys.stderr) sys.exit(1) + for agent_type, image, port in agent_specs: + try: + url = container_manager.start_agent(agent_type, image=image, port=port) + agent_urls[agent_type] = url + except ContainerError as exc: + print(f"Error starting agent '{agent_type}': {exc}", file=sys.stderr) + sys.exit(1) - logger.info( - "Foreman initialised", - config=args.config, - db=str(db_path), - repos=[f"{r.owner}/{r.name}" for r in config.repos], - poll_interval_seconds=config.polling.interval_seconds, - ) + logger.info( + "Foreman initialised", + config=args.config, + db=str(db_path), + repos=[f"{r.owner}/{r.name}" for r in config.repos], + poll_interval_seconds=config.polling.interval_seconds, + ) - # 5. Run the poller and HTTP server concurrently. - asyncio.run(_run_loop(config, memory, poller, dispatcher, args.host, args.port, container_manager, agent_urls)) + # 5. Run the poller and HTTP server concurrently. 
+ asyncio.run(_run_loop(config, memory, poller, dispatcher, args.host, args.port, container_manager, agent_urls)) async def _run_loop( diff --git a/foreman/config.py b/foreman/config.py index 5ab678b..62caa6c 100644 --- a/foreman/config.py +++ b/foreman/config.py @@ -102,6 +102,25 @@ class PollingConfig(BaseModel): """How often to poll GitHub for new events (in seconds).""" +class QueueConfig(BaseModel): + """Task queue configuration.""" + + db_path: Optional[Path] = None + """Path to the SQLite queue database; defaults to ``~/.agent-harness/queue.db`` at runtime.""" + + claim_timeout_seconds: int = 300 + """Seconds before an uncompleted claimed task is re-enqueued.""" + + max_retries: int = 3 + """Maximum number of times a task is re-enqueued before being marked failed.""" + + drain_interval_seconds: int = 10 + """How often (seconds) the harness drains completed tasks from the queue.""" + + requeue_interval_seconds: int = 60 + """How often (seconds) the harness checks for stale claimed tasks to re-enqueue.""" + + class AgentAssignment(BaseModel): """A single agent assigned to a repository.""" @@ -140,6 +159,9 @@ class ForemanConfig(BaseModel): polling: PollingConfig = PollingConfig() """GitHub polling settings.""" + queue: QueueConfig = QueueConfig() + """Task queue settings.""" + repos: list[RepoConfig] = [] """Repositories to monitor.""" diff --git a/foreman/queue.py b/foreman/queue.py new file mode 100644 index 0000000..420dc32 --- /dev/null +++ b/foreman/queue.py @@ -0,0 +1,255 @@ +"""SQLite-backed task queue for the queue-mediated agent protocol. + +All reads and writes use the stdlib ``sqlite3`` module directly — no ORM, +no mocks. Tests must use a real temp-file database via ``pytest tmp_path``. 
+""" + +from __future__ import annotations + +import sqlite3 +import threading +import time +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pathlib import Path + + from foreman.protocol import DecisionMessage, TaskMessage + +_SCHEMA = """ +PRAGMA journal_mode=WAL; + +CREATE TABLE IF NOT EXISTS task_queue ( + task_id TEXT PRIMARY KEY, + agent_url TEXT NOT NULL, + status TEXT NOT NULL DEFAULT 'pending', + payload TEXT NOT NULL, + created_at REAL NOT NULL, + claimed_at REAL, + completed_at REAL, + result TEXT, + retry_count INTEGER NOT NULL DEFAULT 0, + last_heartbeat REAL +); + +CREATE INDEX IF NOT EXISTS idx_task_queue_status ON task_queue (status, agent_url); +""" + + +class TaskQueue: + """Durable task queue backed by a SQLite database. + + Creates the database file and schema on first use. + The connection is kept open for the lifetime of the instance. + + Args: + db_path: Filesystem path to the SQLite database file. + Intermediate directories are created automatically. + claim_timeout_seconds: Seconds before a claimed task without a recent + heartbeat is eligible for re-enqueueing. + """ + + def __init__(self, db_path: Path, claim_timeout_seconds: int = 300) -> None: + self.db_path = db_path + self.claim_timeout_seconds = claim_timeout_seconds + db_path.parent.mkdir(parents=True, exist_ok=True) + # isolation_level=None → autocommit; we manage all transactions manually + # so that BEGIN IMMEDIATE works for atomic claim_next(). + self._conn = sqlite3.connect(str(db_path), check_same_thread=False, isolation_level=None) + self._conn.executescript(_SCHEMA) + self._lock = threading.Lock() + + # ------------------------------------------------------------------ + # Public interface + # ------------------------------------------------------------------ + + def enqueue(self, task: TaskMessage, agent_url: str) -> None: + """Insert a new task with status=pending. + + Args: + task: The task message to enqueue. 
+ agent_url: Base URL of the agent that should process this task. + """ + self._conn.execute( + """ + INSERT INTO task_queue (task_id, agent_url, status, payload, created_at) + VALUES (?, ?, 'pending', ?, ?) + """, + (task.task_id, agent_url, task.model_dump_json(), time.time()), + ) + self._conn.commit() + + def claim_next(self, agent_url: str) -> TaskMessage | None: + """Claim the oldest pending task for agent_url. + + Uses a threading lock plus ``BEGIN IMMEDIATE`` so that concurrent + callers — whether in the same process or different ones — cannot + double-claim the same task. + + Args: + agent_url: The agent URL requesting a task. + + Returns: + The :class:`~foreman.protocol.TaskMessage` for the claimed task, + or ``None`` if the queue has no pending tasks for this agent. + """ + from foreman.protocol import TaskMessage as _TaskMessage + + # _lock serialises same-process threads; BEGIN IMMEDIATE handles + # cross-process / cross-connection contention at the SQLite level. + with self._lock: + self._conn.execute("BEGIN IMMEDIATE") + try: + row = self._conn.execute( + """ + SELECT task_id, payload FROM task_queue + WHERE status = 'pending' AND agent_url = ? + ORDER BY created_at ASC + LIMIT 1 + """, + (agent_url,), + ).fetchone() + if row is None: + self._conn.execute("ROLLBACK") + return None + task_id, payload_json = row + now = time.time() + self._conn.execute( + """ + UPDATE task_queue + SET status = 'claimed', claimed_at = ?, last_heartbeat = ? + WHERE task_id = ? + """, + (now, now, task_id), + ) + self._conn.execute("COMMIT") + except Exception: + self._conn.execute("ROLLBACK") + raise + return _TaskMessage.model_validate_json(payload_json) + + def complete(self, task_id: str, decision: DecisionMessage) -> None: + """Mark a task completed and store the decision result. + + Args: + task_id: ID of the task to mark completed. + decision: The agent's :class:`~foreman.protocol.DecisionMessage`. 
+ """ + self._conn.execute( + """ + UPDATE task_queue + SET status = 'completed', completed_at = ?, result = ? + WHERE task_id = ? + """, + (time.time(), decision.model_dump_json(), task_id), + ) + self._conn.commit() + + def heartbeat(self, task_id: str) -> None: + """Reset last_heartbeat to now, extending the claim window. + + Args: + task_id: ID of the claimed task to heartbeat. + """ + self._conn.execute( + "UPDATE task_queue SET last_heartbeat = ? WHERE task_id = ?", + (time.time(), task_id), + ) + self._conn.commit() + + def drain_completed(self) -> list[tuple[TaskMessage, DecisionMessage]]: + """Return all completed tasks without transitioning their status. + + Called by the harness drain loop. The caller must call + :meth:`mark_done` for each task after it has been successfully + processed, giving at-least-once delivery semantics. + + Returns: + A list of ``(TaskMessage, DecisionMessage)`` tuples for each + completed task. Rows remain ``status=completed`` after this call. + """ + from foreman.protocol import DecisionMessage as _DecisionMessage + from foreman.protocol import TaskMessage as _TaskMessage + + rows = self._conn.execute( + "SELECT task_id, payload, result FROM task_queue WHERE status = 'completed'" + ).fetchall() + if not rows: + return [] + return [ + (_TaskMessage.model_validate_json(payload), _DecisionMessage.model_validate_json(result)) + for _, payload, result in rows + ] + + def mark_done(self, task_id: str) -> None: + """Transition a completed task to done after successful processing. + + Args: + task_id: ID of the completed task to mark done. + """ + self._conn.execute( + "UPDATE task_queue SET status = 'done' WHERE task_id = ? AND status = 'completed'", + (task_id,), + ) + self._conn.commit() + + def requeue_stale(self) -> int: + """Re-enqueue claimed tasks that have exceeded the claim timeout. 
+ + A task is considered stale when both conditions hold: + + - ``status = 'claimed'`` + - ``MAX(claimed_at, last_heartbeat) + claim_timeout_seconds < now`` + + Returns: + The number of tasks re-enqueued. + """ + cutoff = time.time() - self.claim_timeout_seconds + cursor = self._conn.execute( + """ + UPDATE task_queue + SET status = 'pending', claimed_at = NULL, last_heartbeat = NULL, + retry_count = retry_count + 1 + WHERE status = 'claimed' + AND MAX(COALESCE(last_heartbeat, claimed_at), claimed_at) < ? + """, + (cutoff,), + ) + self._conn.commit() + return cursor.rowcount + + def fail_exhausted(self, max_retries: int = 3) -> int: + """Mark tasks that have exceeded max_retries as failed. + + Args: + max_retries: Tasks with ``retry_count >= max_retries`` are marked + ``failed``. + + Returns: + The number of tasks marked failed. + """ + cursor = self._conn.execute( + """ + UPDATE task_queue SET status = 'failed' + WHERE status = 'pending' AND retry_count >= ? + """, + (max_retries,), + ) + self._conn.commit() + return cursor.rowcount + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + def close(self) -> None: + """Close the database connection.""" + self._conn.close() + + def __enter__(self) -> TaskQueue: + """Return self for use as a context manager.""" + return self + + def __exit__(self, *_: object) -> None: + """Close the connection on context exit.""" + self.close() diff --git a/foreman/routers/queue.py b/foreman/routers/queue.py new file mode 100644 index 0000000..f0770e8 --- /dev/null +++ b/foreman/routers/queue.py @@ -0,0 +1,120 @@ +"""Queue HTTP endpoints for the queue-mediated agent protocol.""" + +from __future__ import annotations + +import asyncio + +from fastapi import APIRouter, Depends, Request, Response +from pydantic import BaseModel + +from foreman.protocol import DecisionMessage +from foreman.queue import TaskQueue + +router = APIRouter( 
# --- foreman/routers/queue.py (module body) --------------------------------

router = APIRouter(
    prefix="/queue",
    tags=["queue"],
)


class NextTaskRequest(BaseModel):
    """Request body for POST /queue/next.

    Attributes:
        agent_url: Base URL of the agent requesting a task.
    """

    agent_url: str


class HeartbeatRequest(BaseModel):
    """Request body for POST /queue/heartbeat.

    Attributes:
        task_id: ID of the task to heartbeat.
    """

    task_id: str


def get_task_queue(request: Request) -> TaskQueue:
    """Retrieve the TaskQueue from app state.

    Args:
        request: The incoming FastAPI request.

    Returns:
        The TaskQueue instance attached to app state.
    """
    return request.app.state.task_queue


def get_drain_event(request: Request) -> asyncio.Event | None:
    """Retrieve the drain asyncio.Event from app state.

    Args:
        request: The incoming FastAPI request.

    Returns:
        The drain asyncio.Event, or None if not yet initialised.
    """
    # getattr with a default: the lifespan may not have attached the
    # event yet, so a missing attribute is treated as "no event".
    return getattr(request.app.state, "drain_event", None)


@router.post("/next")
async def queue_next(
    body: NextTaskRequest,
    task_queue: TaskQueue = Depends(get_task_queue),
) -> Response:
    """Claim and return the next pending task for the requesting agent.

    Args:
        body: Request body containing the agent URL.
        task_queue: Task queue from app state (injected).

    Returns:
        200 with TaskMessage JSON when a task is available, 204 when the queue is empty.
    """
    task = task_queue.claim_next(body.agent_url)
    if task is None:
        # Empty queue → 204 with no body; the agent polls again later.
        return Response(status_code=204)
    # NOTE(review): returns the pre-serialised JSON in a raw Response,
    # presumably to skip FastAPI response-model re-validation — confirm.
    return Response(content=task.model_dump_json(), status_code=200, media_type="application/json")
@router.post("/complete")
async def queue_complete(
    decision: DecisionMessage,
    task_queue: TaskQueue = Depends(get_task_queue),
    drain_event: asyncio.Event | None = Depends(get_drain_event),
) -> Response:
    """Store a completed task decision and signal the drain loop.

    Args:
        decision: The agent's DecisionMessage.
        task_queue: Task queue from app state (injected).
        drain_event: Drain loop event from app state (injected).

    Returns:
        202 Accepted.
    """
    task_queue.complete(decision.task_id, decision)
    # Wake the drain loop immediately; drain_event is None until the app
    # lifespan has attached it to app.state (see get_drain_event).
    if drain_event is not None:
        drain_event.set()
    return Response(status_code=202)


@router.post("/heartbeat")
async def queue_heartbeat(
    body: HeartbeatRequest,
    task_queue: TaskQueue = Depends(get_task_queue),
) -> Response:
    """Extend the claim window for an in-progress task.

    Args:
        body: Request body containing the task ID.
        task_queue: Task queue from app state (injected).

    Returns:
        202 Accepted.
    """
    task_queue.heartbeat(body.task_id)
    return Response(status_code=202)


# --- foreman/routers/result.py ---------------------------------------------

"""POST /harness/result endpoint — drain-loop nudge from agents."""

from __future__ import annotations

import asyncio

import structlog
from fastapi import APIRouter, Depends, Request, Response
from pydantic import BaseModel

logger = structlog.get_logger(__name__)

router = APIRouter(
    prefix="/harness",
    tags=["harness"],
)


class ResultNudge(BaseModel):
    """Request body for POST /harness/result.

    Attributes:
        task_id: ID of the completed task triggering the drain.
    """

    task_id: str


def get_drain_event(request: Request) -> asyncio.Event | None:
    """Retrieve the drain asyncio.Event from app state.

    Args:
        request: The incoming FastAPI request.

    Returns:
        The drain asyncio.Event, or None if not yet initialised.
    """
    # getattr with a default tolerates the startup window before the
    # lifespan has created the event.
    return getattr(request.app.state, "drain_event", None)


@router.post("/result")
async def harness_result(
    body: ResultNudge,
    drain_event: asyncio.Event | None = Depends(get_drain_event),
) -> Response:
    """Signal the drain loop that a completed task result is ready.

    The task_id is only logged here — the drain loop reads actual results
    from the queue, so this endpoint is purely a wake-up nudge.

    Args:
        body: Request body containing the completed task ID.
        drain_event: Drain loop event from app state (injected).

    Returns:
        202 Accepted.
    """
    logger.debug("Drain nudge received", task_id=body.task_id)
    # None until the app lifespan has created the event (startup ordering).
    if drain_event is not None:
        drain_event.set()
    return Response(status_code=202)
Args: config: Validated :class:`~foreman.config.ForemanConfig`. memory: Open :class:`~foreman.memory.MemoryStore` instance. + task_queue: Durable :class:`~foreman.queue.TaskQueue` instance. """ - def __init__(self, config: ForemanConfig, memory: MemoryStore) -> None: + def __init__(self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue) -> None: self._config = config self._memory = memory - self._executor = GitHubExecutor(token=str(config.identity.github_token), memory=memory) - self._locks: dict[str, asyncio.Lock] = {} - - def _get_lock(self, url: str) -> asyncio.Lock: - """Return (creating if needed) the per-URL dispatch lock. - - Args: - url: Agent base URL used as the lock key. - - Returns: - The :class:`asyncio.Lock` for this URL. - """ - if url not in self._locks: - self._locks[url] = asyncio.Lock() - return self._locks[url] + self._task_queue = task_queue + self.executor = GitHubExecutor(token=str(config.identity.github_token), memory=memory) async def dispatch(self, event: dict[str, Any], route_target: RouteTarget) -> None: - """Dispatch *event* to the agent described by *route_target*. + """Enqueue *event* for the agent described by *route_target* and nudge it. Sequence: - 1. Acquire per-agent-URL lock (serialise concurrent dispatches). - 2. Fetch memory summary for this repo+issue. - 3. Build a :class:`~foreman.protocol.TaskMessage`. - 4. POST to ``route_target.url/task``. - 5. On non-200 response or network error: log and return. - 6. Parse :class:`~foreman.protocol.DecisionMessage`. - 7. Execute actions via :class:`~foreman.executor.GitHubExecutor`. - 8. Write a summary to memory. + 1. Fetch memory summary for this repo+issue. + 2. Build a :class:`~foreman.protocol.TaskMessage`. + 3. Enqueue the task in the durable queue. + 4. Fire-and-forget ``POST /task`` nudge with ``{"task_id": ...}``. + Network errors are logged and swallowed — the drain loop will retry. 
Args: event: Poller event dict with ``repo``, ``issue_number``, and ``payload`` keys. @@ -80,7 +72,6 @@ async def dispatch(self, event: dict[str, Any], route_target: RouteTarget) -> No repo: str = event["repo"] issue_number: int = event["issue_number"] payload: dict[str, Any] = event["payload"] - agent = route_target.agent_assignment memory_summary = self._memory.get_memory_summary(repo, issue_number) task = TaskMessage( @@ -96,55 +87,135 @@ async def dispatch(self, event: dict[str, Any], route_target: RouteTarget) -> No ), ) - lock = self._get_lock(route_target.url) - async with lock: + self._task_queue.enqueue(task, agent_url=route_target.url) + logger.info("Task enqueued", task_id=task.task_id, repo=repo, issue_number=issue_number) + + try: + async with httpx.AsyncClient() as client: + await client.post( + f"{route_target.url}/task", + json={"task_id": task.task_id}, + timeout=5.0, + ) + except httpx.HTTPError as exc: + logger.warning( + "Nudge to agent failed; task remains in queue", + url=route_target.url, + task_id=task.task_id, + error=str(exc), + ) + + +async def _drain_loop( + task_queue: TaskQueue, + executor: GitHubExecutor, + memory: MemoryStore, + config: ForemanConfig, + drain_event: asyncio.Event, +) -> None: + """Drain completed tasks from the queue and execute their decisions. + + Wakes on *drain_event* or after ``config.queue.drain_interval_seconds``. + Each ``(TaskMessage, DecisionMessage)`` pair returned by + :meth:`~foreman.queue.TaskQueue.drain_completed` is passed to + :meth:`~foreman.executor.GitHubExecutor.execute` and + :meth:`~foreman.memory.MemoryStore.upsert_memory_summary`. + + Args: + task_queue: The durable task queue. + executor: GitHub action executor. + memory: Memory store for summary updates. + config: Runtime configuration (provides drain interval). + drain_event: Asyncio event that wakes the loop early. 
+ """ + while True: + with contextlib.suppress(asyncio.TimeoutError): + await asyncio.wait_for(drain_event.wait(), timeout=config.queue.drain_interval_seconds) + drain_event.clear() + + try: + pairs = task_queue.drain_completed() + except Exception: + logger.exception("drain_completed failed; skipping cycle") + continue + + for task, decision in pairs: try: - async with httpx.AsyncClient() as client: - response = await client.post( - f"{route_target.url}/task", - json=task.model_dump(), - timeout=60.0, - ) - except httpx.HTTPError as exc: - logger.error( - "HTTP error dispatching task to agent", - url=route_target.url, - error=str(exc), - repo=repo, + issue_number: int = task.payload.get("number", 0) + executor.execute( + decision, + repo=task.repo, issue_number=issue_number, + task_type=task.type, ) - return + summary = f"decision={decision.decision.value}; rationale={decision.rationale}" + memory.upsert_memory_summary(task.repo, issue_number, summary) + task_queue.mark_done(task.task_id) + except Exception: + logger.exception("Failed to process drain task", task_id=task.task_id) - if response.status_code != 200: - logger.error( - "Agent returned non-200 response", - url=route_target.url, - status=response.status_code, - body=response.text, - repo=repo, - issue_number=issue_number, - ) - return + if pairs: + logger.info("Drain loop processed tasks", count=len(pairs)) - decision = DecisionMessage.model_validate(response.json()) - self._executor.execute( - decision, - repo=repo, - issue_number=issue_number, - task_type=task.type, - allow_close=agent.allow_close, - ) +async def _requeue_loop( + task_queue: TaskQueue, + config: ForemanConfig, +) -> None: + """Re-enqueue stale claimed tasks and fail exhausted ones. - summary = f"decision={decision.decision.value}; rationale={decision.rationale}" - self._memory.upsert_memory_summary(repo, issue_number, summary) + Runs on ``config.queue.requeue_interval_seconds`` interval. 
- logger.info( - "Dispatch complete", - repo=repo, - issue_number=issue_number, - decision=decision.decision.value, - ) + Args: + task_queue: The durable task queue. + config: Runtime configuration (provides requeue interval and max retries). + """ + while True: + await asyncio.sleep(config.queue.requeue_interval_seconds) + try: + requeued = task_queue.requeue_stale() + failed = task_queue.fail_exhausted(max_retries=config.queue.max_retries) + logger.info("Requeue cycle", requeued=requeued, failed=failed) + except Exception: + logger.exception("Requeue cycle failed; retrying on next interval") + + +@asynccontextmanager +async def _lifespan(app: FastAPI) -> AsyncGenerator[None, None]: + """FastAPI lifespan: start background drain and requeue loops. + + Reads ``app.state.task_queue``, ``app.state.executor``, + ``app.state.memory``, and ``app.state.config`` which must be set + by the caller (``__main__.py``) before the server starts. + + Args: + app: The FastAPI application instance. + + Yields: + None: Control passes to FastAPI while background loops are running. 
+ """ + task_queue: TaskQueue = app.state.task_queue + executor: GitHubExecutor = app.state.executor + memory: MemoryStore = app.state.memory + config: ForemanConfig = app.state.config + + drain_event = asyncio.Event() + app.state.drain_event = drain_event + + drain_task = asyncio.create_task(_drain_loop(task_queue, executor, memory, config, drain_event)) + requeue_task = asyncio.create_task(_requeue_loop(task_queue, config)) + + logger.info("Background loops started") + try: + yield + finally: + drain_task.cancel() + requeue_task.cancel() + with contextlib.suppress(asyncio.CancelledError, Exception): + await drain_task + with contextlib.suppress(asyncio.CancelledError, Exception): + await requeue_task + logger.info("Background loops stopped") app: FastAPI = FastAPI( @@ -155,6 +226,7 @@ async def dispatch(self, event: dict[str, Any], route_target: RouteTarget) -> No swagger_ui_parameters={ "persistAuthorization": True, }, + lifespan=_lifespan, ) app.add_middleware( @@ -164,5 +236,7 @@ async def dispatch(self, event: dict[str, Any], route_target: RouteTarget) -> No app.add_middleware(LogCorrelationIdMiddleware) app.include_router(health.router) +app.include_router(queue_router.router) +app.include_router(result_router.router) configure_otel(app, settings) diff --git a/pyproject.toml b/pyproject.toml index 2bab046..3a8e5e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -90,7 +90,7 @@ disable = [ "MD070", # frontmatter-key-sort ] line-length = 120 -exclude = ["node_modules", "build", "dist", "CHANGELOG.md"] +exclude = ["node_modules", "build", "dist", "CHANGELOG.md", ".memsearch"] include = ["docs/*.md", "README.md"] respect-gitignore = true flavor = "mkdocs" @@ -143,12 +143,17 @@ norecursedirs = [ "venv", "requirements*", "lib", + "foreman-client", + "agents", ] python_files = [ "test_*.py", "*_test.py", "tests.py", ] +markers = [ + "integration: marks tests as integration tests requiring real HTTP/SQLite (run with --run-integration)", +] [tool.interrogate] 
ignore-init-method = true @@ -257,6 +262,11 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" typing-modules = ["typing", "types", "typing_extensions", "mypy", "mypy_extensions"] [tool.ruff.lint.per-file-ignores] +"foreman/routers/*.py" = [ + "B008", # FastAPI Depends() in argument defaults is idiomatic + "TC001", # Application imports needed at runtime for FastAPI dependency resolution + "TC003", # Stdlib imports needed at runtime for FastAPI dependency resolution +] "tests/*" = [ "ANN001", # missing-type-function-argument "ANN002", # missing-type-args diff --git a/tests/test_agent_server.py b/tests/test_agent_server.py index c8a9039..2b2f3a4 100644 --- a/tests/test_agent_server.py +++ b/tests/test_agent_server.py @@ -1,25 +1,62 @@ -"""Tests for agents/issue-triage/agent.py — FastAPI server scaffold.""" +"""Tests for agents/issue-triage/agent.py — 202 protocol with ForemanClient.""" from __future__ import annotations import sys from pathlib import Path +from unittest.mock import MagicMock import pytest from fastapi.testclient import TestClient -# Make the agent importable without installing it as a package. +# Make the agent and foreman-client importable without installation. 
+_CLIENT_DIR = Path(__file__).parent.parent / "foreman-client" _AGENT_DIR = Path(__file__).parent.parent / "agents" / "issue-triage" / "issue_triage" -if str(_AGENT_DIR) not in sys.path: - sys.path.insert(0, str(_AGENT_DIR)) +for _dir in (_CLIENT_DIR, _AGENT_DIR): + if str(_dir) not in sys.path: + sys.path.insert(0, str(_dir)) from agent import app # noqa: E402 +from foremanclient import ForemanClient # noqa: E402 +from foremanclient.models import LLMBackendRef, TaskContext, TaskMessage # noqa: E402 + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _make_task(task_id: str = "test-uuid-001") -> TaskMessage: + """Return a minimal TaskMessage for use in tests.""" + return TaskMessage( + task_id=task_id, + type="issue.triage", + repo="owner/repo", + payload={"issue_number": 42}, + context=TaskContext(llm_backend=LLMBackendRef(provider="anthropic", model="claude-haiku-4-5-20251001")), + ) + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- @pytest.fixture() -def client() -> TestClient: - """Return a synchronous TestClient for the agent FastAPI app.""" - return TestClient(app) +def mock_foreman_client() -> MagicMock: + """Return a MagicMock with the ForemanClient spec; next_task() returns None by default.""" + mc = MagicMock(spec=ForemanClient) + mc.next_task.return_value = None + return mc + + +@pytest.fixture() +def client(mock_foreman_client: MagicMock) -> TestClient: + """Return a TestClient with a mock ForemanClient injected via app.state.""" + app.state.client = mock_foreman_client + with TestClient(app) as tc: + yield tc + del app.state.client # --------------------------------------------------------------------------- @@ -38,8 +75,7 @@ def test_health_returns_200(self, client: TestClient) -> None: def 
test_health_response_body(self, client: TestClient) -> None: """Health endpoint returns a JSON body with status ok.""" response = client.get("/health") - data = response.json() - assert data.get("status") == "ok" + assert response.json().get("status") == "ok" # --------------------------------------------------------------------------- @@ -48,75 +84,138 @@ def test_health_response_body(self, client: TestClient) -> None: class TestTaskEndpointValidation: - """POST /task validates the incoming TaskMessage.""" + """POST /task validates the incoming nudge payload.""" def test_invalid_json_returns_422(self, client: TestClient) -> None: """Malformed JSON body returns HTTP 422.""" response = client.post("/task", content=b"not-json", headers={"Content-Type": "application/json"}) assert response.status_code == 422 - def test_missing_required_fields_returns_422(self, client: TestClient) -> None: - """Missing required TaskMessage fields return HTTP 422.""" - response = client.post("/task", json={"task_id": "abc"}) + def test_missing_task_id_returns_422(self, client: TestClient) -> None: + """Missing task_id field returns HTTP 422.""" + response = client.post("/task", json={}) assert response.status_code == 422 # --------------------------------------------------------------------------- -# POST /task — success path (triage logic stubbed) +# POST /task — 202 Accepted + background task # --------------------------------------------------------------------------- -def _valid_task_payload() -> dict: - """Return a minimal valid TaskMessage payload.""" - return { - "task_id": "test-uuid-001", - "type": "issue.triage", - "repo": "owner/repo", - "payload": { - "issue_number": 42, - "title": "Bug: something is broken", - "body": "Steps to reproduce...", - "author": "user123", - "labels": [], - }, - "context": { - "memory_summary": "", - "llm_backend": {"provider": "anthropic", "model": "claude-haiku-4-5-20251001"}, - }, - } - - -class TestTaskEndpointSuccess: - """POST /task with a 
valid payload returns a DecisionMessage.""" - - def test_valid_task_returns_200(self, client: TestClient, mocker) -> None: - """Valid TaskMessage returns HTTP 200.""" - mocker.patch("agent.triage", return_value=_stub_decision("test-uuid-001")) - response = client.post("/task", json=_valid_task_payload()) - assert response.status_code == 200 +class TestTaskEndpointAccepted: + """POST /task returns 202 immediately and fires a background task.""" + + def test_post_task_returns_202(self, client: TestClient) -> None: + """POST /task returns 202 Accepted — not 200.""" + response = client.post("/task", json={"task_id": "test-uuid-001"}) + assert response.status_code == 202 + + def test_background_task_calls_next_task(self, client: TestClient, mock_foreman_client: MagicMock) -> None: + """Background task calls client.next_task() after nudge received.""" + # Startup poll already called next_task once; reset before POST + mock_foreman_client.reset_mock() + client.post("/task", json={"task_id": "test-uuid-001"}) + mock_foreman_client.next_task.assert_called() + + def test_next_task_returning_none_does_not_crash(self, client: TestClient, mock_foreman_client: MagicMock) -> None: + """When next_task() returns None, background task completes without error.""" + mock_foreman_client.next_task.return_value = None + response = client.post("/task", json={"task_id": "test-uuid-001"}) + assert response.status_code == 202 + + def test_next_task_returning_task_calls_complete( + self, client: TestClient, mock_foreman_client: MagicMock, mocker + ) -> None: + """When next_task() returns a task, complete_task() is called after triage.""" + task = _make_task() + mock_foreman_client.next_task.return_value = task + stub_decision = MagicMock() + mocker.patch("agent.triage", return_value=stub_decision) + client.post("/task", json={"task_id": task.task_id}) + mock_foreman_client.complete_task.assert_called_once_with(task.task_id, stub_decision) + + +# 
--------------------------------------------------------------------------- +# Startup poll +# --------------------------------------------------------------------------- + + +class TestHeartbeatDuringProcessing: + """_process_task fires heartbeat every 25 s while triage runs.""" + + def test_heartbeat_called_during_triage(self, mock_foreman_client: MagicMock, mocker) -> None: + """heartbeat() is called at least once while triage is running.""" + import threading + + task = _make_task() + heartbeat_called = threading.Event() + + def slow_triage(_task): + # Wait until the heartbeat thread fires at least once + assert heartbeat_called.wait(timeout=2), "heartbeat not called within 2 s" + return MagicMock() + + def record_heartbeat(*args, **kwargs): + heartbeat_called.set() + + mock_foreman_client.heartbeat.side_effect = record_heartbeat + mock_foreman_client.next_task.side_effect = [task, None] + mocker.patch("agent.triage", side_effect=slow_triage) + + # Patch the heartbeat interval to 0.05 s so the test doesn't take 25 s + mocker.patch("agent._HEARTBEAT_INTERVAL", 0.05) + + app.state.client = mock_foreman_client + with TestClient(app): + pass + del app.state.client + + mock_foreman_client.heartbeat.assert_called() + + def test_heartbeat_stopped_after_triage(self, mock_foreman_client: MagicMock, mocker) -> None: + """Heartbeat thread stops firing after triage completes.""" + import time + + task = _make_task() + mock_foreman_client.next_task.side_effect = [task, None] + stub_decision = MagicMock() + mocker.patch("agent.triage", return_value=stub_decision) + mocker.patch("agent._HEARTBEAT_INTERVAL", 0.05) + + app.state.client = mock_foreman_client + with TestClient(app): + pass + del app.state.client + + calls_after_complete = mock_foreman_client.heartbeat.call_count + time.sleep(0.2) # wait to see if more heartbeats fire after triage + assert mock_foreman_client.heartbeat.call_count == calls_after_complete + + +class TestStartupPoll: + """Lifespan startup poll drains 
all queued tasks on boot.""" + + def test_startup_poll_calls_next_task_once_when_queue_empty(self, mock_foreman_client: MagicMock) -> None: + """Startup poll calls next_task() once when the queue is empty (returns None).""" + app.state.client = mock_foreman_client + with TestClient(app): + pass + del app.state.client + assert mock_foreman_client.next_task.call_count == 1 + + def test_startup_poll_drains_all_queued_tasks(self, mock_foreman_client: MagicMock, mocker) -> None: + """Startup poll loops until next_task() returns None, processing each task.""" + tasks = [_make_task(f"t{i}") for i in range(3)] + mock_foreman_client.next_task.side_effect = [*tasks, None] + stub_decision = MagicMock() + mocker.patch("agent.triage", return_value=stub_decision) + + app.state.client = mock_foreman_client + with TestClient(app): + pass + del app.state.client - def test_response_is_decision_message(self, client: TestClient, mocker) -> None: - """Response body is a valid DecisionMessage.""" - mocker.patch("agent.triage", return_value=_stub_decision("test-uuid-001")) - response = client.post("/task", json=_valid_task_payload()) - data = response.json() - assert "task_id" in data - assert "decision" in data - assert "actions" in data - - def test_task_id_echoed_in_response(self, client: TestClient, mocker) -> None: - """The task_id from the request is echoed in the response.""" - mocker.patch("agent.triage", return_value=_stub_decision("test-uuid-001")) - response = client.post("/task", json=_valid_task_payload()) - data = response.json() - assert data["task_id"] == "test-uuid-001" - - -def _stub_decision(task_id: str) -> dict: - """Return a minimal DecisionMessage dict for stubbing triage().""" - return { - "task_id": task_id, - "decision": "skip", - "rationale": "stub", - "actions": [], - } + # next_task called 4 times: 3 tasks + 1 None to break the loop + assert mock_foreman_client.next_task.call_count == 4 + # All 3 tasks completed + assert 
mock_foreman_client.complete_task.call_count == 3 diff --git a/tests/test_config.py b/tests/test_config.py index 60b31f1..ad13e92 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -5,7 +5,7 @@ import pytest -from foreman.config import ConfigError, ForemanConfig, load_config +from foreman.config import ConfigError, ForemanConfig, QueueConfig, load_config VALID_YAML = textwrap.dedent("""\ identity: @@ -151,6 +151,69 @@ def test_missing_env_var_raises_config_error( load_config(env_ref_config_file) +class TestQueueConfig: + """Tests for QueueConfig and ForemanConfig.queue integration.""" + + def test_queue_config_defaults(self) -> None: + """QueueConfig has expected default values.""" + q = QueueConfig() + assert q.db_path is None + assert q.claim_timeout_seconds == 300 + assert q.max_retries == 3 + assert q.drain_interval_seconds == 10 + assert q.requeue_interval_seconds == 60 + + def test_foreman_config_queue_defaults_when_absent(self, valid_config_file: Path) -> None: + """ForemanConfig.queue defaults to QueueConfig() when the section is absent.""" + config = load_config(valid_config_file) + assert isinstance(config.queue, QueueConfig) + assert config.queue.db_path is None + assert config.queue.claim_timeout_seconds == 300 + + def test_foreman_config_queue_section_parsed(self, tmp_path: Path) -> None: + """ForemanConfig.queue is populated from the YAML queue section.""" + yaml_text = textwrap.dedent("""\ + identity: + github_token: "ghp_test_token" + github_user: "test-bot" + llm: + provider: anthropic + model: claude-sonnet-4-6 + queue: + db_path: "/tmp/test_queue.db" + claim_timeout_seconds: 120 + max_retries: 5 + drain_interval_seconds: 20 + requeue_interval_seconds: 90 + """) + p = tmp_path / "config.yaml" + p.write_text(yaml_text) + config = load_config(p) + assert config.queue.db_path == Path("/tmp/test_queue.db") + assert config.queue.claim_timeout_seconds == 120 + assert config.queue.max_retries == 5 + assert config.queue.drain_interval_seconds 
== 20 + assert config.queue.requeue_interval_seconds == 90 + + def test_queue_db_path_env_ref_resolved(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None: + """${VAR} references in db_path are resolved from environment variables.""" + monkeypatch.setenv("QUEUE_DB_PATH", "/var/run/foreman/queue.db") + yaml_text = textwrap.dedent("""\ + identity: + github_token: "ghp_test_token" + github_user: "test-bot" + llm: + provider: anthropic + model: claude-sonnet-4-6 + queue: + db_path: "${QUEUE_DB_PATH}" + """) + p = tmp_path / "config.yaml" + p.write_text(yaml_text) + config = load_config(p) + assert config.queue.db_path == Path("/var/run/foreman/queue.db") + + class TestConfigRepr: """Tests that secrets do not leak into repr/str output.""" diff --git a/tests/test_executor.py b/tests/test_executor.py index 845b50f..d625fa5 100644 --- a/tests/test_executor.py +++ b/tests/test_executor.py @@ -188,8 +188,9 @@ def test_decision_written_to_action_log(self, executor_and_issue, memory: Memory decision = _make_decision([ActionItem(type="add_label", label="bug")]) executor.execute(decision, repo="owner/repo", issue_number=10) - with sqlite3.connect(memory.db_path) as conn: - row = conn.execute("SELECT repo, issue_id, decision FROM action_log").fetchone() + conn = sqlite3.connect(memory.db_path) + row = conn.execute("SELECT repo, issue_id, decision FROM action_log").fetchone() + conn.close() assert row is not None assert row[0] == "owner/repo" assert row[1] == 10 @@ -208,8 +209,9 @@ def test_decision_logged_before_github_call_fails(self, memory: MemoryStore, moc with pytest.raises(RuntimeError, match="network error"): executor.execute(decision, repo="owner/repo", issue_number=7) - with sqlite3.connect(memory.db_path) as conn: - count = conn.execute("SELECT COUNT(*) FROM action_log").fetchone()[0] + conn = sqlite3.connect(memory.db_path) + count = conn.execute("SELECT COUNT(*) FROM action_log").fetchone()[0] + conn.close() assert count == 1, "log entry must exist even 
after GitHub failure" def test_task_type_stored_in_action_log(self, executor_and_issue, memory: MemoryStore) -> None: @@ -218,8 +220,9 @@ def test_task_type_stored_in_action_log(self, executor_and_issue, memory: Memory decision = _make_decision([]) executor.execute(decision, repo="owner/repo", issue_number=1, task_type="issue.triage") - with sqlite3.connect(memory.db_path) as conn: - row = conn.execute("SELECT task_type FROM action_log").fetchone() + conn = sqlite3.connect(memory.db_path) + row = conn.execute("SELECT task_type FROM action_log").fetchone() + conn.close() assert row[0] == "issue.triage" def test_rationale_stored_in_action_log(self, executor_and_issue, memory: MemoryStore) -> None: @@ -228,8 +231,9 @@ def test_rationale_stored_in_action_log(self, executor_and_issue, memory: Memory decision = _make_decision([], rationale="Confirmed bug in stack trace.") executor.execute(decision, repo="owner/repo", issue_number=2) - with sqlite3.connect(memory.db_path) as conn: - row = conn.execute("SELECT rationale FROM action_log").fetchone() + conn = sqlite3.connect(memory.db_path) + row = conn.execute("SELECT rationale FROM action_log").fetchone() + conn.close() assert row[0] == "Confirmed bug in stack trace." 
@@ -263,8 +267,9 @@ def test_empty_actions_list_does_nothing(self, executor_and_issue, memory: Memor issue.create_comment.assert_not_called() issue.edit.assert_not_called() - with sqlite3.connect(memory.db_path) as conn: - count = conn.execute("SELECT COUNT(*) FROM action_log").fetchone()[0] + conn = sqlite3.connect(memory.db_path) + count = conn.execute("SELECT COUNT(*) FROM action_log").fetchone()[0] + conn.close() assert count == 1, "decision must still be logged even with no actions" def test_get_issue_called_with_correct_number(self, memory: MemoryStore, mocker) -> None: diff --git a/tests/test_integration.py b/tests/test_integration.py index c440602..5c9082b 100644 --- a/tests/test_integration.py +++ b/tests/test_integration.py @@ -1,24 +1,30 @@ """End-to-end integration tests for the full issue triage pipeline. Exercises the complete path: - poller event → router → dispatcher → executor (mocked GitHub API) → memory + poller event → router → dispatcher (enqueue + nudge) → queue No live GitHub API or LLM calls are made; boundaries are mocked at the -PyGithub and httpx layers. The MemoryStore uses a real temp-file SQLite DB. +PyGithub and httpx layers. The MemoryStore and TaskQueue use real temp-file +SQLite DBs. 
""" from __future__ import annotations +import sqlite3 +import sys from pathlib import Path from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest +from fastapi import FastAPI +from fastapi.testclient import TestClient from foreman.config import AgentAssignment, ForemanConfig, IdentityConfig, LLMConfig, RepoConfig from foreman.memory import MemoryStore from foreman.poller import GitHubPoller -from foreman.protocol import ActionItem, DecisionMessage, DecisionType +from foreman.protocol import LLMBackendRef, TaskContext, TaskMessage +from foreman.queue import TaskQueue from foreman.routers.agent import Router from foreman.server import Dispatcher @@ -46,19 +52,6 @@ def _make_event(issue_number: int = _ISSUE_NUMBER) -> dict[str, Any]: } -def _label_and_respond_decision(task_id: str = "task-001") -> DecisionMessage: - """Return a label_and_respond decision with add_label + comment actions.""" - return DecisionMessage( - task_id=task_id, - decision=DecisionType.label_and_respond, - rationale="Reproducible crash — labeling as bug.", - actions=[ - ActionItem(type="add_label", label="bug"), - ActionItem(type="comment", body="Thanks for the report! 
Labeled as bug."), - ], - ) - - # --------------------------------------------------------------------------- # Fixtures # --------------------------------------------------------------------------- @@ -71,6 +64,13 @@ def memory(tmp_path: Path): yield store +@pytest.fixture() +def task_queue(tmp_path: Path): + """Fresh TaskQueue backed by a real temp-file SQLite DB.""" + with TaskQueue(tmp_path / "queue.db") as queue: + yield queue + + @pytest.fixture() def config() -> ForemanConfig: """ForemanConfig with one repo wired to an issue-triage agent.""" @@ -102,171 +102,111 @@ def router(config: ForemanConfig) -> Router: # --------------------------------------------------------------------------- -# Helper: patch httpx to return a canned agent decision +# Helpers # --------------------------------------------------------------------------- -def _patch_httpx(decision: DecisionMessage): - """Context manager that patches httpx.AsyncClient to return *decision*.""" - - class _Ctx: - def __enter__(self): - self._patcher = patch("foreman.server.httpx.AsyncClient") - mock_cls = self._patcher.start() - mock_client = AsyncMock() - mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ = AsyncMock(return_value=None) - mock_client.post = AsyncMock( - return_value=MagicMock( - status_code=200, - json=MagicMock(return_value=decision.model_dump()), - ) - ) - mock_cls.return_value = mock_client - self.mock_client = mock_client - return self - - def __exit__(self, *_): - self._patcher.stop() - - return _Ctx() +def _mock_async_client(*, post_side_effect=None): + """Return a context-manager-compatible AsyncClient mock for the nudge POST.""" + mock_client = AsyncMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + if post_side_effect is not None: + mock_client.post = AsyncMock(side_effect=post_side_effect) + else: + resp = MagicMock() + resp.status_code = 202 + mock_client.post = 
AsyncMock(return_value=resp) + return mock_client # --------------------------------------------------------------------------- -# Full pipeline: event → router → dispatcher → executor → memory +# Full pipeline: event → router → dispatcher → queue # --------------------------------------------------------------------------- class TestFullTriagePipeline: - """End-to-end: route an event, dispatch to agent, execute actions, update memory.""" - - @pytest.mark.asyncio - async def test_label_and_comment_applied_to_github_issue( - self, config: ForemanConfig, memory: MemoryStore, router: Router, mocker - ) -> None: - """label_and_respond decision adds a label and comment on the GitHub issue.""" - mock_gh_cls = mocker.patch("foreman.executor.Github") - mock_issue = MagicMock() - mock_gh_cls.return_value.get_repo.return_value.get_issue.return_value = mock_issue - - dispatcher = Dispatcher(config=config, memory=memory) - event = _make_event() - route_target = router.route("issue.triage", _REPO) - assert route_target is not None - - with _patch_httpx(_label_and_respond_decision()): - await dispatcher.dispatch(event, route_target) - - mock_issue.add_to_labels.assert_called_once_with("bug") - mock_issue.create_comment.assert_called_once_with("Thanks for the report! 
Labeled as bug.") + """End-to-end: route an event, dispatch to agent (enqueue + nudge), verify queue state.""" @pytest.mark.asyncio - async def test_memory_updated_after_decision( - self, config: ForemanConfig, memory: MemoryStore, router: Router, mocker + async def test_dispatch_enqueues_task_for_correct_agent( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, router: Router, mocker ) -> None: - """Memory summary is written to the DB after a decision is executed.""" + """dispatch() inserts a TaskMessage into the queue with the agent URL.""" mocker.patch("foreman.executor.Github") - dispatcher = Dispatcher(config=config, memory=memory) + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) route_target = router.route("issue.triage", _REPO) assert route_target is not None - with _patch_httpx(_label_and_respond_decision()): + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client() await dispatcher.dispatch(_make_event(), route_target) - summary = memory.get_memory_summary(_REPO, _ISSUE_NUMBER) - assert summary is not None - assert "label_and_respond" in summary + claimed = task_queue.claim_next("http://localhost:9001") + assert claimed is not None + assert claimed.repo == _REPO + assert claimed.type == "issue.triage" @pytest.mark.asyncio - async def test_action_logged_to_db_before_github_call( - self, config: ForemanConfig, memory: MemoryStore, router: Router, mocker + async def test_dispatch_nudge_sends_task_id_to_agent( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, router: Router, mocker ) -> None: - """Decision is written to action_log before any GitHub API call is made.""" - import sqlite3 - - call_order: list[str] = [] - - mock_gh_cls = mocker.patch("foreman.executor.Github") - mock_issue = MagicMock() - - def record_label(label: str) -> None: - # Read DB inside the side-effect to confirm it was written first - with 
sqlite3.connect(str(memory.db_path)) as conn: - rows = conn.execute("SELECT decision FROM action_log").fetchall() - call_order.append(f"db_rows={len(rows)},github_label={label}") - - mock_issue.add_to_labels.side_effect = record_label - mock_gh_cls.return_value.get_repo.return_value.get_issue.return_value = mock_issue - - dispatcher = Dispatcher(config=config, memory=memory) + """dispatch() nudge POST sends only the task_id (not the full TaskMessage).""" + mocker.patch("foreman.executor.Github") + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) route_target = router.route("issue.triage", _REPO) assert route_target is not None - with _patch_httpx(_label_and_respond_decision()): + mock_post = AsyncMock(return_value=MagicMock(status_code=202)) + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_client = AsyncMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + mock_client.post = mock_post + mock_cls.return_value = mock_client await dispatcher.dispatch(_make_event(), route_target) - # action_log row existed when the GitHub API was called - assert call_order == ["db_rows=1,github_label=bug"] + mock_post.assert_called_once() + nudge_body = mock_post.call_args[1]["json"] + assert set(nudge_body.keys()) == {"task_id"} @pytest.mark.asyncio - async def test_prior_memory_summary_injected_into_task( - self, config: ForemanConfig, memory: MemoryStore, router: Router, mocker + async def test_prior_memory_summary_injected_into_enqueued_task( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, router: Router, mocker ) -> None: - """Second dispatch injects the memory summary from the first dispatch.""" + """dispatch() injects the stored memory summary into the enqueued TaskMessage.""" mocker.patch("foreman.executor.Github") memory.upsert_memory_summary(_REPO, _ISSUE_NUMBER, "Prior: labeled as bug on 2024-01-01.") - dispatcher = 
Dispatcher(config=config, memory=memory) + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) route_target = router.route("issue.triage", _REPO) assert route_target is not None - captured: dict[str, Any] = {} - - async def capture_post(_url: str, **kwargs: Any) -> MagicMock: - captured["context"] = (kwargs.get("json") or {}).get("context", {}) - resp = MagicMock() - resp.status_code = 200 - resp.json.return_value = DecisionMessage( - task_id="t1", - decision=DecisionType.skip, - rationale="Already handled.", - ).model_dump() - return resp - with patch("foreman.server.httpx.AsyncClient") as mock_cls: - mock_client = AsyncMock() - mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ = AsyncMock(return_value=None) - mock_client.post = capture_post - mock_cls.return_value = mock_client - + mock_cls.return_value = _mock_async_client() await dispatcher.dispatch(_make_event(), route_target) - assert captured["context"]["memory_summary"] == "Prior: labeled as bug on 2024-01-01." + claimed = task_queue.claim_next("http://localhost:9001") + assert claimed is not None + assert claimed.context.memory_summary == "Prior: labeled as bug on 2024-01-01." 
@pytest.mark.asyncio - async def test_close_action_skipped_when_allow_close_false( - self, config: ForemanConfig, memory: MemoryStore, router: Router, mocker + async def test_task_remains_in_queue_when_nudge_fails( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, router: Router, mocker ) -> None: - """close_issue action is not executed when allow_close is False.""" - mock_gh_cls = mocker.patch("foreman.executor.Github") - mock_issue = MagicMock() - mock_gh_cls.return_value.get_repo.return_value.get_issue.return_value = mock_issue + """Task is durably enqueued even if the nudge POST to the agent fails.""" + import httpx as _httpx - dispatcher = Dispatcher(config=config, memory=memory) + mocker.patch("foreman.executor.Github") + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) route_target = router.route("issue.triage", _REPO) assert route_target is not None - close_decision = DecisionMessage( - task_id="task-003", - decision=DecisionType.close, - rationale="Stale issue.", - actions=[ActionItem(type="close_issue")], - ) - - with _patch_httpx(close_decision): + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client(post_side_effect=_httpx.ConnectError("refused")) await dispatcher.dispatch(_make_event(), route_target) - mock_issue.edit.assert_not_called() + claimed = task_queue.claim_next("http://localhost:9001") + assert claimed is not None # --------------------------------------------------------------------------- @@ -278,13 +218,12 @@ class TestPollerFeedsDispatcher: """Tests that the poller callback chain routes and dispatches correctly.""" @pytest.mark.asyncio - async def test_poller_event_routed_and_dispatched( - self, config: ForemanConfig, memory: MemoryStore, router: Router, mocker + async def test_poller_event_routed_and_enqueued( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, router: Router, mocker ) -> None: - """A polled 
issue travels through the callback into the dispatcher.""" + """A polled issue travels through the callback into the dispatcher and is enqueued.""" from pydantic import SecretStr - # Mock PyGithub at the poller level to return one issue mock_gh_cls = mocker.patch("foreman.poller.Github") mock_gh_repo = MagicMock() mock_gh_cls.return_value.get_repo.return_value = mock_gh_repo @@ -300,31 +239,194 @@ async def test_poller_event_routed_and_dispatched( mock_gh_repo.get_issues.return_value = [mock_issue] mock_gh_repo.get_collaborators.return_value = [] - # Mock PyGithub at the executor level separately - mock_exec_gh = mocker.patch("foreman.executor.Github") - mock_exec_issue = MagicMock() - mock_exec_gh.return_value.get_repo.return_value.get_issue.return_value = mock_exec_issue + mocker.patch("foreman.executor.Github") poller = GitHubPoller(token=SecretStr("test-token"), memory=memory) - dispatcher = Dispatcher(config=config, memory=memory) + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) dispatched_events: list[dict[str, Any]] = [] - async def on_event(_repo_config: RepoConfig, event: dict[str, Any]) -> None: + async def on_event(_: RepoConfig, event: dict[str, Any]) -> None: dispatched_events.append(event) route_target = router.route("issue.triage", event["repo"]) if route_target is not None: await dispatcher.dispatch(event, route_target) - with _patch_httpx(_label_and_respond_decision()): + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client() await poller.poll_all(config.repos, on_event) assert len(dispatched_events) == 1 assert dispatched_events[0]["issue_number"] == _ISSUE_NUMBER - # GitHub executor was called - mock_exec_issue.add_to_labels.assert_called_once_with("bug") + claimed = task_queue.claim_next("http://localhost:9001") + assert claimed is not None + assert claimed.repo == _REPO + + +# --------------------------------------------------------------------------- +# Helpers 
for restart-resilience test +# --------------------------------------------------------------------------- + + +def _sqlite_status(db_path: Path, task_id: str) -> str: + """Return the ``status`` column of a task_queue row, or ``'missing'``.""" + conn = sqlite3.connect(str(db_path)) + try: + row = conn.execute("SELECT status FROM task_queue WHERE task_id = ?", (task_id,)).fetchone() + return row[0] if row else "missing" + finally: + conn.close() + + +def _sqlite_action_log(db_path: Path, repo: str, issue_id: int) -> list[tuple[str, str]]: + """Return ``(decision, rationale)`` rows from ``action_log`` for *repo* / *issue_id*.""" + conn = sqlite3.connect(str(db_path)) + try: + return conn.execute( + "SELECT decision, rationale FROM action_log WHERE repo = ? AND issue_id = ?", + (repo, issue_id), + ).fetchall() + finally: + conn.close() + + +# --------------------------------------------------------------------------- +# Integration: agent restart resilience (zero task loss) +# --------------------------------------------------------------------------- + + +@pytest.mark.integration +class TestAgentRestartResilience: + """MVP acceptance criterion: zero task loss under a simulated agent restart. + + The test wires the harness queue endpoints (via a minimal in-process FastAPI + app) to a real SQLite TaskQueue. It uses the actual ForemanClient and agent + startup-poll code, exercising the full claim → process → complete → drain + path without any live network sockets. + """ + + def test_pending_task_claimed_on_restart( + self, + tmp_path: Path, + config: ForemanConfig, + mocker, + ) -> None: + """Task queued while the agent is down is picked up by the startup poll on restart. + + Flow: + + 1. Enqueue a task while the agent is "down" (nudge never reaches it). + 2. Assert the task is ``pending`` in the queue. + 3. "Restart" the agent — lifespan startup poll fires ``next_task()``. + 4. Agent processes the task and calls ``complete_task()``. + 5. 
Assert the task is ``completed`` (or already ``done``). + 6. Drain manually and execute (simulates the drain loop). + 7. Assert task is ``done`` and ``action_log`` has an entry. + """ + # Make foreman-client and agent importable without installation. + _CLIENT_DIR = Path(__file__).parent.parent / "foreman-client" + _AGENT_DIR = Path(__file__).parent.parent / "agents" / "issue-triage" / "issue_triage" + for _d in (_CLIENT_DIR, _AGENT_DIR): + if str(_d) not in sys.path: + sys.path.insert(0, str(_d)) + + from agent import app as agent_app # noqa: PLC0415 + from foremanclient import ForemanClient # noqa: PLC0415 + from foremanclient.models import DecisionMessage as ForemanDM # noqa: PLC0415 + from foremanclient.models import DecisionType as ForemanDT # noqa: PLC0415 + + queue_db = tmp_path / "queue.db" + memory_db = tmp_path / "memory.db" + mocker.patch("foreman.executor.Github") + + with TaskQueue(queue_db) as task_queue, MemoryStore(memory_db) as memory: + from foreman.executor import GitHubExecutor # noqa: PLC0415 + from foreman.routers import queue as _qr # noqa: PLC0415 + from foreman.routers import result as _rr # noqa: PLC0415 + from foreman.routers.queue import get_drain_event as _qde # noqa: PLC0415 + from foreman.routers.queue import get_task_queue as _gtq # noqa: PLC0415 + from foreman.routers.result import get_drain_event as _rde # noqa: PLC0415 + + executor = GitHubExecutor(token="test-token", memory=memory) + + # Minimal in-process harness: queue endpoints only, no background loops. 
+ mini_harness = FastAPI(title="test-harness") + mini_harness.include_router(_qr.router) + mini_harness.include_router(_rr.router) + mini_harness.dependency_overrides[_gtq] = lambda: task_queue + mini_harness.dependency_overrides[_qde] = lambda: None + mini_harness.dependency_overrides[_rde] = lambda: None + + with TestClient(mini_harness, raise_server_exceptions=True) as harness_tc: + # -- Step 1: enqueue while agent is "down" (no nudge sent) -- + task = TaskMessage( + type="issue.triage", + repo="owner/repo", + payload={ + "number": 42, + "title": "App crashes on startup", + "body": "Steps: run `app start`", + "state": "open", + "user": {"login": "reporter"}, + "labels": [], + }, + context=TaskContext(llm_backend=LLMBackendRef(provider="anthropic", model="claude-sonnet-4-6")), + ) + task_queue.enqueue(task, agent_url="http://localhost:9001") + + # -- Step 2: task must be durable and pending -- + assert _sqlite_status(queue_db, task.task_id) == "pending" + + # Prepare a stub decision so triage requires no LLM call. + stub_decision = ForemanDM( + task_id=task.task_id, + decision=ForemanDT.skip, + rationale="Integration test — skipping via stub", + actions=[], + ) + mocker.patch("agent.triage", return_value=stub_decision) + + # Wire ForemanClient to use harness_tc as its HTTP transport. + # Bypassing __init__ lets us inject the TestClient directly without env vars. + foreman_client = ForemanClient.__new__(ForemanClient) + foreman_client._agent_url = "http://localhost:9001" + foreman_client._http = harness_tc + # Prevent agent lifespan teardown from closing our shared harness_tc. 
+ foreman_client.close = lambda: None # type: ignore[method-assign] + + # -- Step 3 & 4: "restart" agent — lifespan startup poll claims + processes -- + agent_app.state.client = foreman_client + try: + with TestClient(agent_app, raise_server_exceptions=True): + pass # startup poll completes inside lifespan __enter__ + finally: + del agent_app.state.client + + # -- Step 5: startup poll must have completed the task -- + status = _sqlite_status(queue_db, task.task_id) + assert status in ("completed", "done"), ( + f"Expected 'completed' or 'done' after agent restart, got {status!r}" + ) + + # -- Step 6: drain manually (simulates the drain loop) -- + pairs = task_queue.drain_completed() + for drained_task, decision in pairs: + issue_number = drained_task.payload.get("number", 0) + executor.execute( + decision, + repo=drained_task.repo, + issue_number=issue_number, + task_type=drained_task.type, + ) + memory.upsert_memory_summary( + drained_task.repo, + issue_number, + f"decision={decision.decision.value}", + ) - # Memory was updated - summary = memory.get_memory_summary(_REPO, _ISSUE_NUMBER) - assert summary is not None + # -- Step 7: task is done and action_log is populated -- + assert _sqlite_status(queue_db, task.task_id) == "done" + entries = _sqlite_action_log(memory_db, "owner/repo", 42) + assert len(entries) >= 1, "action_log must have at least one entry" + assert entries[0][0] == "skip" diff --git a/tests/test_main.py b/tests/test_main.py index b0639dc..7681c45 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -95,7 +95,7 @@ def test_start_initialises_memory_db(self, tmp_path: Path, mocker) -> None: mock_memory_cls = mocker.patch("foreman.__main__.MemoryStore") mocker.patch("foreman.__main__.GitHubPoller") mocker.patch("foreman.__main__.Dispatcher") - mocker.patch("foreman.__main__.asyncio.run") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) main(["start", "--config", str(config_path)]) @@ -109,7 +109,7 @@ def 
test_start_runs_asyncio_event_loop(self, tmp_path: Path, mocker) -> None: mocker.patch("foreman.__main__.MemoryStore") mocker.patch("foreman.__main__.GitHubPoller") mocker.patch("foreman.__main__.Dispatcher") - mock_run = mocker.patch("foreman.__main__.asyncio.run") + mock_run = mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) main(["start", "--config", str(config_path)]) @@ -123,7 +123,7 @@ def test_start_creates_poller(self, tmp_path: Path, mocker) -> None: mocker.patch("foreman.__main__.MemoryStore") mock_poller_cls = mocker.patch("foreman.__main__.GitHubPoller") mocker.patch("foreman.__main__.Dispatcher") - mocker.patch("foreman.__main__.asyncio.run") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) main(["start", "--config", str(config_path)]) @@ -137,12 +137,76 @@ def test_start_creates_dispatcher(self, tmp_path: Path, mocker) -> None: mocker.patch("foreman.__main__.MemoryStore") mocker.patch("foreman.__main__.GitHubPoller") mock_dispatcher_cls = mocker.patch("foreman.__main__.Dispatcher") - mocker.patch("foreman.__main__.asyncio.run") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) main(["start", "--config", str(config_path)]) mock_dispatcher_cls.assert_called_once() + def test_start_creates_task_queue(self, tmp_path: Path, mocker) -> None: + """main() instantiates a TaskQueue and passes it to Dispatcher.""" + config_path = tmp_path / "config.yaml" + _write_minimal_config(config_path) + + mocker.patch("foreman.__main__.MemoryStore") + mocker.patch("foreman.__main__.GitHubPoller") + mocker.patch("foreman.__main__.Dispatcher") + mock_queue_cls = mocker.patch("foreman.__main__.TaskQueue") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) + + main(["start", "--config", str(config_path)]) + + mock_queue_cls.assert_called_once() + + +# --------------------------------------------------------------------------- +# --queue-db CLI argument +# 
--------------------------------------------------------------------------- + + +class TestQueueDbArg: + """--queue-db overrides config.queue.db_path for TaskQueue construction.""" + + def test_queue_db_arg_overrides_config_path(self, tmp_path: Path, mocker) -> None: + """--queue-db path is passed to TaskQueue when provided.""" + config_path = tmp_path / "config.yaml" + _write_minimal_config(config_path) + custom_db = tmp_path / "custom_queue.db" + + mocker.patch("foreman.__main__.MemoryStore") + mocker.patch("foreman.__main__.GitHubPoller") + mocker.patch("foreman.__main__.Dispatcher") + mock_queue_cls = mocker.patch("foreman.__main__.TaskQueue") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) + + main(["start", "--config", str(config_path), "--queue-db", str(custom_db)]) + + call_args = mock_queue_cls.call_args + assert ( + call_args[0][0] == custom_db + or call_args.args[0] == custom_db + or call_args.kwargs.get("db_path") == custom_db + ) + + def test_queue_db_defaults_to_default_path_when_config_has_none(self, tmp_path: Path, mocker) -> None: + """TaskQueue uses _DEFAULT_QUEUE_DB_PATH when --queue-db is absent and config has no db_path.""" + from foreman.__main__ import _DEFAULT_QUEUE_DB_PATH + + config_path = tmp_path / "config.yaml" + _write_minimal_config(config_path) + + mocker.patch("foreman.__main__.MemoryStore") + mocker.patch("foreman.__main__.GitHubPoller") + mocker.patch("foreman.__main__.Dispatcher") + mock_queue_cls = mocker.patch("foreman.__main__.TaskQueue") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) + + main(["start", "--config", str(config_path)]) + + call_args = mock_queue_cls.call_args + actual_path = call_args[0][0] if call_args[0] else call_args.kwargs.get("db_path") + assert actual_path == _DEFAULT_QUEUE_DB_PATH + # --------------------------------------------------------------------------- # Helpers for container-related tests @@ -272,7 +336,7 @@ def 
test_no_agent_images_skips_container_manager(self, tmp_path: Path, mocker) - mocker.patch("foreman.__main__.MemoryStore") mocker.patch("foreman.__main__.GitHubPoller") mocker.patch("foreman.__main__.Dispatcher") - mocker.patch("foreman.__main__.asyncio.run") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) mock_cm_cls = mocker.patch("foreman.__main__.ContainerManager") main(["start", "--config", str(config_path)]) @@ -318,7 +382,7 @@ def test_start_agent_called_with_image_and_port(self, tmp_path: Path, mocker) -> mocker.patch("foreman.__main__.MemoryStore") mocker.patch("foreman.__main__.GitHubPoller") mocker.patch("foreman.__main__.Dispatcher") - mocker.patch("foreman.__main__.asyncio.run") + mocker.patch("foreman.__main__.asyncio.run", side_effect=lambda c: c.close()) mock_cm = mocker.MagicMock() mock_cm.start_agent.return_value = "http://localhost:9001" mocker.patch("foreman.__main__.ContainerManager", return_value=mock_cm) diff --git a/tests/test_memory.py b/tests/test_memory.py index b74d117..5b2ddd8 100644 --- a/tests/test_memory.py +++ b/tests/test_memory.py @@ -19,22 +19,25 @@ class TestMemoryStoreInit: def test_creates_db_file(self, tmp_path: Path) -> None: """MemoryStore creates the SQLite DB file at the given path.""" db_path = tmp_path / "memory.db" - MemoryStore(db_path) + m = MemoryStore(db_path) assert db_path.exists() + m.close() def test_creates_parent_directories(self, tmp_path: Path) -> None: """MemoryStore creates intermediate directories if they don't exist.""" db_path = tmp_path / "nested" / "dir" / "memory.db" - MemoryStore(db_path) + m = MemoryStore(db_path) assert db_path.exists() + m.close() def test_action_log_table_exists(self, tmp_path: Path) -> None: """action_log table is created with the correct schema.""" db_path = tmp_path / "memory.db" store = MemoryStore(db_path) store.close() - with sqlite3.connect(db_path) as conn: - rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND 
name='action_log'").fetchall() + conn = sqlite3.connect(db_path) + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='action_log'").fetchall() + conn.close() assert rows, "action_log table should exist" def test_memory_summary_table_exists(self, tmp_path: Path) -> None: @@ -42,10 +45,9 @@ def test_memory_summary_table_exists(self, tmp_path: Path) -> None: db_path = tmp_path / "memory.db" store = MemoryStore(db_path) store.close() - with sqlite3.connect(db_path) as conn: - rows = conn.execute( - "SELECT name FROM sqlite_master WHERE type='table' AND name='memory_summary'" - ).fetchall() + conn = sqlite3.connect(db_path) + rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='memory_summary'").fetchall() + conn.close() assert rows, "memory_summary table should exist" @@ -69,8 +71,9 @@ def test_log_action_inserts_row(self, store: MemoryStore) -> None: actions=[ActionItem(type="add_label", label="bug")], ) - with sqlite3.connect(store.db_path) as conn: - rows = conn.execute("SELECT * FROM action_log").fetchall() + conn = sqlite3.connect(store.db_path) + rows = conn.execute("SELECT * FROM action_log").fetchall() + conn.close() assert len(rows) == 1 def test_log_action_stores_correct_values(self, store: MemoryStore) -> None: @@ -85,10 +88,9 @@ def test_log_action_stores_correct_values(self, store: MemoryStore) -> None: actions=actions, ) - with sqlite3.connect(store.db_path) as conn: - row = conn.execute( - "SELECT repo, issue_id, task_type, decision, rationale, actions FROM action_log" - ).fetchone() + conn = sqlite3.connect(store.db_path) + row = conn.execute("SELECT repo, issue_id, task_type, decision, rationale, actions FROM action_log").fetchone() + conn.close() assert row[0] == "owner/repo" assert row[1] == 7 @@ -111,8 +113,9 @@ def test_log_action_multiple_entries(self, store: MemoryStore) -> None: actions=[], ) - with sqlite3.connect(store.db_path) as conn: - count = conn.execute("SELECT COUNT(*) FROM 
action_log").fetchone()[0] + conn = sqlite3.connect(store.db_path) + count = conn.execute("SELECT COUNT(*) FROM action_log").fetchone()[0] + conn.close() assert count == 3 def test_log_action_null_rationale(self, store: MemoryStore) -> None: @@ -126,8 +129,9 @@ def test_log_action_null_rationale(self, store: MemoryStore) -> None: actions=[], ) - with sqlite3.connect(store.db_path) as conn: - row = conn.execute("SELECT rationale FROM action_log").fetchone() + conn = sqlite3.connect(store.db_path) + row = conn.execute("SELECT rationale FROM action_log").fetchone() + conn.close() assert row[0] is None diff --git a/tests/test_queue.py b/tests/test_queue.py new file mode 100644 index 0000000..cc944d8 --- /dev/null +++ b/tests/test_queue.py @@ -0,0 +1,353 @@ +"""Tests for foreman/queue.py — TaskQueue.""" + +from __future__ import annotations + +import threading +import time +from pathlib import Path +from typing import Any, Generator + +import pytest + +from foreman.protocol import ActionItem, DecisionMessage, DecisionType, LLMBackendRef, TaskContext, TaskMessage +from foreman.queue import TaskQueue + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +_TASK_CONTEXT = TaskContext(llm_backend=LLMBackendRef(provider="anthropic", model="claude-sonnet-4-6")) + + +def _make_task(task_id: str = "task-1") -> TaskMessage: + return TaskMessage( + task_id=task_id, + type="issue.triage", + repo="owner/repo", + payload={"issue": {"number": 1, "title": "Bug"}}, + context=_TASK_CONTEXT, + ) + + +def _make_decision(task_id: str = "task-1") -> DecisionMessage: + return DecisionMessage( + task_id=task_id, + decision=DecisionType.label_and_respond, + rationale="Looks like a bug", + actions=[ActionItem(type="add_label", label="bug")], + ) + + +@pytest.fixture() +def queue(tmp_path: Path) -> Generator[TaskQueue, Any, None]: + """Return a TaskQueue backed by a temp-file 
SQLite DB.""" + q = TaskQueue(db_path=tmp_path / "queue.db") + yield q + q.close() + + +# --------------------------------------------------------------------------- +# Schema +# --------------------------------------------------------------------------- + + +class TestSchema: + """Verify the task_queue table and index are created on init.""" + + def test_table_exists(self, queue: TaskQueue) -> None: + """task_queue table is present after init.""" + row = queue._conn.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='task_queue'").fetchone() + assert row is not None + + def test_index_exists(self, queue: TaskQueue) -> None: + """idx_task_queue_status index is present after init.""" + row = queue._conn.execute( + "SELECT name FROM sqlite_master WHERE type='index' AND name='idx_task_queue_status'" + ).fetchone() + assert row is not None + + def test_db_file_created(self, tmp_path: Path) -> None: + """DB file and parent directories are auto-created.""" + db_path = tmp_path / "nested" / "dir" / "queue.db" + with TaskQueue(db_path=db_path): + pass + assert db_path.exists() + + +# --------------------------------------------------------------------------- +# enqueue + claim_next (happy path) +# --------------------------------------------------------------------------- + + +class TestEnqueueAndClaimNext: + """enqueue → claim_next round-trip.""" + + def test_enqueue_inserts_pending_task(self, queue: TaskQueue) -> None: + """enqueue inserts a row with status=pending.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't1'").fetchone() + assert row is not None + assert row[0] == "pending" + + def test_claim_next_returns_task_message(self, queue: TaskQueue) -> None: + """claim_next returns the TaskMessage for the claimed task.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + claimed = 
queue.claim_next(agent_url="http://agent:9001") + assert claimed is not None + assert claimed.task_id == "t1" + assert claimed.type == "issue.triage" + + def test_claim_next_sets_status_claimed(self, queue: TaskQueue) -> None: + """After claim_next the row status is claimed.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't1'").fetchone() + assert row[0] == "claimed" + + def test_claim_next_empty_queue_returns_none(self, queue: TaskQueue) -> None: + """claim_next returns None when no pending tasks exist.""" + result = queue.claim_next(agent_url="http://agent:9001") + assert result is None + + def test_claim_next_only_returns_matching_agent_url(self, queue: TaskQueue) -> None: + """claim_next only returns tasks enqueued for the given agent_url.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://other-agent:9002") + result = queue.claim_next(agent_url="http://agent:9001") + assert result is None + + def test_claim_next_claims_oldest_task_first(self, queue: TaskQueue) -> None: + """claim_next returns tasks in FIFO order (oldest created_at first).""" + for i in range(3): + queue.enqueue(_make_task(f"t{i}"), agent_url="http://agent:9001") + time.sleep(0.01) # ensure distinct created_at values + first = queue.claim_next(agent_url="http://agent:9001") + assert first is not None + assert first.task_id == "t0" + + +# --------------------------------------------------------------------------- +# complete + drain_completed +# --------------------------------------------------------------------------- + + +class TestCompleteAndDrain: + """complete + drain_completed round-trip.""" + + def test_complete_sets_status_completed(self, queue: TaskQueue) -> None: + """complete() sets status=completed and stores the result.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + 
queue.claim_next(agent_url="http://agent:9001") + queue.complete("t1", _make_decision("t1")) + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't1'").fetchone() + assert row[0] == "completed" + + def test_drain_completed_returns_task_and_decision(self, queue: TaskQueue) -> None: + """drain_completed returns (TaskMessage, DecisionMessage) tuples.""" + task = _make_task("t1") + decision = _make_decision("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + queue.complete("t1", decision) + results = queue.drain_completed() + assert len(results) == 1 + returned_task, returned_decision = results[0] + assert returned_task.task_id == "t1" + assert returned_decision.task_id == "t1" + assert returned_decision.decision == DecisionType.label_and_respond + + def test_drain_completed_leaves_rows_in_completed_state(self, queue: TaskQueue) -> None: + """drain_completed does not transition rows — they remain completed after the call.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + queue.complete("t1", _make_decision("t1")) + queue.drain_completed() + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't1'").fetchone() + assert row[0] == "completed" + + def test_drain_completed_empty_returns_empty_list(self, queue: TaskQueue) -> None: + """drain_completed returns [] when no completed tasks exist.""" + assert queue.drain_completed() == [] + + def test_drain_completed_does_not_return_done_tasks(self, queue: TaskQueue) -> None: + """drain_completed does not return tasks that have been marked done.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + queue.complete("t1", _make_decision("t1")) + queue.drain_completed() + queue.mark_done("t1") + second = queue.drain_completed() + assert len(second) == 0 + + +# 
--------------------------------------------------------------------------- +# mark_done +# --------------------------------------------------------------------------- + + +class TestMarkDone: + """mark_done transitions a completed row to done.""" + + def test_mark_done_transitions_row_to_done(self, queue: TaskQueue) -> None: + """mark_done sets status=done for the target task.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + queue.complete("t1", _make_decision("t1")) + queue.mark_done("t1") + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't1'").fetchone() + assert row[0] == "done" + + def test_mark_done_leaves_other_rows_untouched(self, queue: TaskQueue) -> None: + """mark_done only transitions the specified task_id.""" + for tid in ("t1", "t2"): + task = _make_task(tid) + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + queue.complete(tid, _make_decision(tid)) + queue.mark_done("t1") + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't2'").fetchone() + assert row[0] == "completed" + + +# --------------------------------------------------------------------------- +# heartbeat +# --------------------------------------------------------------------------- + + +class TestHeartbeat: + """heartbeat updates last_heartbeat.""" + + def test_heartbeat_updates_last_heartbeat(self, queue: TaskQueue) -> None: + """heartbeat() updates the last_heartbeat column.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + before = queue._conn.execute("SELECT last_heartbeat FROM task_queue WHERE task_id = 't1'").fetchone()[0] + time.sleep(0.01) + queue.heartbeat("t1") + after = queue._conn.execute("SELECT last_heartbeat FROM task_queue WHERE task_id = 't1'").fetchone()[0] + assert after is not None + assert after > 
(before or 0) + + +# --------------------------------------------------------------------------- +# requeue_stale +# --------------------------------------------------------------------------- + + +class TestRequeueStale: + """requeue_stale re-enqueues timed-out claimed tasks.""" + + def test_requeue_stale_re_enqueues_timed_out_task(self, tmp_path: Path) -> None: + """A claimed task past the timeout is re-enqueued and retry_count incremented.""" + with TaskQueue(db_path=tmp_path / "queue.db", claim_timeout_seconds=1) as q: + task = _make_task("t1") + q.enqueue(task, agent_url="http://agent:9001") + q.claim_next(agent_url="http://agent:9001") + + # Force both claimed_at and last_heartbeat into the past to simulate timeout + past = time.time() - 10 + q._conn.execute( + "UPDATE task_queue SET claimed_at = ?, last_heartbeat = ? WHERE task_id = 't1'", + (past, past), + ) + + count = q.requeue_stale() + assert count == 1 + + row = q._conn.execute("SELECT status, retry_count FROM task_queue WHERE task_id = 't1'").fetchone() + assert row[0] == "pending" + assert row[1] == 1 + + def test_requeue_stale_ignores_fresh_claims(self, queue: TaskQueue) -> None: + """A recently claimed task is not re-enqueued.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue.claim_next(agent_url="http://agent:9001") + count = queue.requeue_stale() + assert count == 0 + + def test_requeue_stale_ignores_heartbeated_task(self, tmp_path: Path) -> None: + """A task with a recent heartbeat is not re-enqueued even if claimed_at is old.""" + with TaskQueue(db_path=tmp_path / "queue.db", claim_timeout_seconds=1) as q: + task = _make_task("t1") + q.enqueue(task, agent_url="http://agent:9001") + q.claim_next(agent_url="http://agent:9001") + + # Age the claimed_at but keep last_heartbeat fresh + now = time.time() + q._conn.execute( + "UPDATE task_queue SET claimed_at = ?, last_heartbeat = ? 
WHERE task_id = 't1'", + (now - 10, now), + ) + q._conn.commit() + + count = q.requeue_stale() + assert count == 0 + + +# --------------------------------------------------------------------------- +# fail_exhausted +# --------------------------------------------------------------------------- + + +class TestFailExhausted: + """fail_exhausted marks tasks at max_retries as failed.""" + + def test_fail_exhausted_marks_task_failed(self, queue: TaskQueue) -> None: + """A pending task at max_retries is marked failed.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue._conn.execute("UPDATE task_queue SET retry_count = 3 WHERE task_id = 't1'") + queue._conn.commit() + count = queue.fail_exhausted(max_retries=3) + assert count == 1 + row = queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 't1'").fetchone() + assert row[0] == "failed" + + def test_fail_exhausted_ignores_tasks_below_max(self, queue: TaskQueue) -> None: + """Tasks with retry_count < max_retries are not failed.""" + task = _make_task("t1") + queue.enqueue(task, agent_url="http://agent:9001") + queue._conn.execute("UPDATE task_queue SET retry_count = 2 WHERE task_id = 't1'") + queue._conn.commit() + count = queue.fail_exhausted(max_retries=3) + assert count == 0 + + +# --------------------------------------------------------------------------- +# Concurrent claim safety +# --------------------------------------------------------------------------- + + +class TestConcurrentClaim: + """Two threads calling claim_next simultaneously claim only one task each.""" + + def test_concurrent_claim_no_double_claim(self, queue: TaskQueue) -> None: + """Only one thread claims a task when two race simultaneously.""" + queue.enqueue(_make_task("t1"), agent_url="http://agent:9001") + + results: list[TaskMessage | None] = [] + lock = threading.Lock() + + def claim() -> None: + result = queue.claim_next(agent_url="http://agent:9001") + with lock: + results.append(result) + + 
t1 = threading.Thread(target=claim) + t2 = threading.Thread(target=claim) + t1.start() + t2.start() + t1.join() + t2.join() + + claimed = [r for r in results if r is not None] + assert len(claimed) == 1 diff --git a/tests/test_queue_router.py b/tests/test_queue_router.py new file mode 100644 index 0000000..b5d907f --- /dev/null +++ b/tests/test_queue_router.py @@ -0,0 +1,173 @@ +"""Tests for foreman/routers/queue.py — queue HTTP endpoints.""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +import pytest +from fastapi.testclient import TestClient + +from foreman.protocol import ActionItem, DecisionMessage, DecisionType, LLMBackendRef, TaskContext, TaskMessage +from foreman.queue import TaskQueue +from foreman.routers.queue import get_drain_event, get_task_queue +from foreman.server import app + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +_TASK_CONTEXT = TaskContext(llm_backend=LLMBackendRef(provider="anthropic", model="claude-sonnet-4-6")) + + +def _make_task(task_id: str = "task-1") -> TaskMessage: + return TaskMessage( + task_id=task_id, + type="issue.triage", + repo="owner/repo", + payload={"issue": {"number": 1}}, + context=_TASK_CONTEXT, + ) + + +def _make_decision(task_id: str = "task-1") -> DecisionMessage: + return DecisionMessage( + task_id=task_id, + decision=DecisionType.label_and_respond, + rationale="Looks like a bug", + actions=[ActionItem(type="add_label", label="bug")], + ) + + +@pytest.fixture() +def mock_queue() -> MagicMock: + """Return a MagicMock with the TaskQueue spec.""" + return MagicMock(spec=TaskQueue) + + +@pytest.fixture() +def mock_drain_event() -> MagicMock: + """Return a MagicMock standing in for asyncio.Event.""" + return MagicMock() + + +@pytest.fixture() +def client(mock_queue: MagicMock, mock_drain_event: MagicMock) -> TestClient: + """Return a TestClient with queue 
dependencies overridden.""" + app.dependency_overrides[get_task_queue] = lambda: mock_queue + app.dependency_overrides[get_drain_event] = lambda: mock_drain_event + with TestClient(app) as c: + yield c + app.dependency_overrides.clear() + + +# --------------------------------------------------------------------------- +# POST /queue/next +# --------------------------------------------------------------------------- + + +class TestQueueNext: + """POST /queue/next — claim next pending task.""" + + def test_returns_200_with_task_when_available(self, client: TestClient, mock_queue: MagicMock) -> None: + """Returns 200 and TaskMessage JSON when a task is available.""" + task = _make_task("task-abc") + mock_queue.claim_next.return_value = task + + response = client.post("/queue/next", json={"agent_url": "http://agent:9001"}) + + assert response.status_code == 200 + body = response.json() + assert body["task_id"] == "task-abc" + assert body["type"] == "issue.triage" + + def test_claim_next_called_with_agent_url(self, client: TestClient, mock_queue: MagicMock) -> None: + """claim_next is called with the agent_url from the request body.""" + mock_queue.claim_next.return_value = None + + client.post("/queue/next", json={"agent_url": "http://agent:9001"}) + + mock_queue.claim_next.assert_called_once_with("http://agent:9001") + + def test_returns_204_when_queue_empty(self, client: TestClient, mock_queue: MagicMock) -> None: + """Returns 204 No Content when claim_next returns None.""" + mock_queue.claim_next.return_value = None + + response = client.post("/queue/next", json={"agent_url": "http://agent:9001"}) + + assert response.status_code == 204 + assert response.content == b"" + + def test_response_is_valid_task_message_json(self, client: TestClient, mock_queue: MagicMock) -> None: + """The 200 response body deserialises to a valid TaskMessage.""" + task = _make_task("task-xyz") + mock_queue.claim_next.return_value = task + + response = client.post("/queue/next", 
json={"agent_url": "http://agent:9001"}) + + assert response.status_code == 200 + parsed = TaskMessage.model_validate(response.json()) + assert parsed.task_id == "task-xyz" + + +# --------------------------------------------------------------------------- +# POST /queue/complete +# --------------------------------------------------------------------------- + + +class TestQueueComplete: + """POST /queue/complete — store decision and signal drain.""" + + def test_returns_202(self, client: TestClient) -> None: + """Returns 202 Accepted.""" + decision = _make_decision("task-1") + + response = client.post( + "/queue/complete", content=decision.model_dump_json(), headers={"content-type": "application/json"} + ) + + assert response.status_code == 202 + + def test_calls_complete_with_task_id_and_decision(self, client: TestClient, mock_queue: MagicMock) -> None: + """TaskQueue.complete() is called with the task_id and full DecisionMessage.""" + decision = _make_decision("task-1") + + client.post( + "/queue/complete", content=decision.model_dump_json(), headers={"content-type": "application/json"} + ) + + mock_queue.complete.assert_called_once() + args = mock_queue.complete.call_args + assert args[0][0] == "task-1" + stored: DecisionMessage = args[0][1] + assert stored.decision == DecisionType.label_and_respond + + def test_sets_drain_event(self, client: TestClient, mock_drain_event: MagicMock) -> None: + """The drain event is set after storing the decision.""" + decision = _make_decision("task-1") + + client.post( + "/queue/complete", content=decision.model_dump_json(), headers={"content-type": "application/json"} + ) + + mock_drain_event.set.assert_called_once() + + +# --------------------------------------------------------------------------- +# POST /queue/heartbeat +# --------------------------------------------------------------------------- + + +class TestQueueHeartbeat: + """POST /queue/heartbeat — extend claim window.""" + + def test_returns_202(self, client: 
TestClient) -> None: + """Returns 202 Accepted.""" + response = client.post("/queue/heartbeat", json={"task_id": "task-1"}) + + assert response.status_code == 202 + + def test_calls_heartbeat_with_task_id(self, client: TestClient, mock_queue: MagicMock) -> None: + """TaskQueue.heartbeat() is called with the correct task_id.""" + client.post("/queue/heartbeat", json={"task_id": "task-99"}) + + mock_queue.heartbeat.assert_called_once_with("task-99") diff --git a/tests/test_result_router.py b/tests/test_result_router.py new file mode 100644 index 0000000..b1f6702 --- /dev/null +++ b/tests/test_result_router.py @@ -0,0 +1,61 @@ +"""Tests for foreman/routers/result.py — POST /harness/result endpoint.""" + +from __future__ import annotations + +from unittest.mock import MagicMock + +import pytest +from fastapi.testclient import TestClient + +from foreman.routers.result import get_drain_event +from foreman.server import app + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + + +@pytest.fixture() +def mock_drain_event() -> MagicMock: + """Return a MagicMock standing in for asyncio.Event.""" + return MagicMock() + + +@pytest.fixture() +def client(mock_drain_event: MagicMock) -> TestClient: + """Return a TestClient with drain_event dependency overridden.""" + app.dependency_overrides[get_drain_event] = lambda: mock_drain_event + with TestClient(app) as c: + yield c + app.dependency_overrides.clear() + + +# --------------------------------------------------------------------------- +# POST /harness/result +# --------------------------------------------------------------------------- + + +class TestHarnessResult: + """POST /harness/result — drain-loop nudge.""" + + def test_returns_202(self, client: TestClient) -> None: + """Returns 202 Accepted.""" + response = client.post("/harness/result", json={"task_id": "task-1"}) + + assert response.status_code == 
202 + + def test_sets_drain_event(self, client: TestClient, mock_drain_event: MagicMock) -> None: + """The drain event is set when the nudge is received.""" + client.post("/harness/result", json={"task_id": "task-1"}) + + mock_drain_event.set.assert_called_once() + + def test_drain_event_none_does_not_raise(self) -> None: + """Endpoint returns 202 even when drain_event is not initialised (None).""" + app.dependency_overrides[get_drain_event] = lambda: None + try: + with TestClient(app) as c: + response = c.post("/harness/result", json={"task_id": "task-1"}) + assert response.status_code == 202 + finally: + app.dependency_overrides.clear() diff --git a/tests/test_server.py b/tests/test_server.py index 2e31bac..654c59e 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -2,19 +2,21 @@ from __future__ import annotations +import asyncio +from contextlib import suppress from pathlib import Path from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import httpx import pytest -import pytest_asyncio from foreman.config import AgentAssignment, ForemanConfig, IdentityConfig, LLMConfig, RepoConfig from foreman.memory import MemoryStore from foreman.protocol import ActionItem, DecisionMessage, DecisionType +from foreman.queue import TaskQueue from foreman.routers.agent import RouteTarget -from foreman.server import Dispatcher +from foreman.server import Dispatcher, _drain_loop, _requeue_loop # --------------------------------------------------------------------------- # Fixtures @@ -28,6 +30,13 @@ def memory(tmp_path: Path): yield store +@pytest.fixture() +def task_queue(tmp_path: Path): + """Provide a fresh TaskQueue backed by a temp-file DB.""" + with TaskQueue(tmp_path / "queue.db") as queue: + yield queue + + @pytest.fixture() def config() -> ForemanConfig: """Minimal ForemanConfig for tests.""" @@ -80,47 +89,94 @@ def _make_event(repo: str = "owner/repo", issue_number: int = 42) -> dict[str, A } +def _make_dispatcher(config, memory, 
task_queue, mocker) -> Dispatcher: + """Construct a Dispatcher with Github patched out.""" + mocker.patch("foreman.executor.Github") + return Dispatcher(config=config, memory=memory, task_queue=task_queue) + + +def _mock_async_client(*, post_return=None, post_side_effect=None): + """Return a context-manager-compatible AsyncClient mock.""" + mock_client = AsyncMock() + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + if post_side_effect is not None: + mock_client.post = AsyncMock(side_effect=post_side_effect) + else: + resp = MagicMock() + resp.status_code = 202 + mock_client.post = AsyncMock(return_value=post_return or resp) + return mock_client + + # --------------------------------------------------------------------------- # Dispatcher initialisation # --------------------------------------------------------------------------- class TestDispatcherInit: - """Dispatcher can be constructed from config and memory.""" + """Dispatcher can be constructed from config, memory, and task_queue.""" - def test_instantiates(self, config: ForemanConfig, memory: MemoryStore, mocker) -> None: + def test_instantiates(self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, mocker) -> None: """Dispatcher is created without errors.""" mocker.patch("foreman.executor.Github") - dispatcher = Dispatcher(config=config, memory=memory) + dispatcher = Dispatcher(config=config, memory=memory, task_queue=task_queue) assert isinstance(dispatcher, Dispatcher) # --------------------------------------------------------------------------- -# Dispatch: happy path +# Dispatch: enqueue + nudge # --------------------------------------------------------------------------- -class TestDispatchHappyPath: - """dispatch() sends a task and executes the returned decision.""" +class TestDispatchEnqueues: + """dispatch() enqueues the task and sends a fire-and-forget nudge.""" @pytest.mark.asyncio - async def 
test_dispatch_posts_to_agent_url(self, config, memory, route_target, skip_decision, mocker) -> None: - """dispatch() POSTs a TaskMessage to route_target.url + '/task'.""" - mocker.patch("foreman.executor.Github") - dispatcher = Dispatcher(config=config, memory=memory) + async def test_dispatch_enqueues_task_for_agent_url( + self, config, memory, task_queue, route_target, mocker + ) -> None: + """dispatch() inserts the task into the queue for route_target.url.""" + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.json.return_value = skip_decision.model_dump() - mock_post = AsyncMock(return_value=mock_response) + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client() + await dispatcher.dispatch(_make_event(), route_target) + + claimed = task_queue.claim_next("http://localhost:8001") + assert claimed is not None + assert claimed.repo == "owner/repo" + + @pytest.mark.asyncio + async def test_dispatch_injects_memory_summary_into_enqueued_task( + self, config, memory, task_queue, route_target, mocker + ) -> None: + """dispatch() fetches and injects the memory summary into the enqueued TaskMessage.""" + memory.upsert_memory_summary("owner/repo", 42, "Prior: labeled as bug.") + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) + + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client() + await dispatcher.dispatch(_make_event(issue_number=42), route_target) + + claimed = task_queue.claim_next("http://localhost:8001") + assert claimed is not None + assert claimed.context.memory_summary == "Prior: labeled as bug." 
+ + @pytest.mark.asyncio + async def test_dispatch_sends_nudge_to_agent_task_endpoint( + self, config, memory, task_queue, route_target, mocker + ) -> None: + """dispatch() sends POST /task as a nudge.""" + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) + mock_post = AsyncMock(return_value=MagicMock(status_code=202)) - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: + with patch("foreman.server.httpx.AsyncClient") as mock_cls: mock_client = AsyncMock() mock_client.__aenter__ = AsyncMock(return_value=mock_client) mock_client.__aexit__ = AsyncMock(return_value=None) mock_client.post = mock_post - mock_client_cls.return_value = mock_client - + mock_cls.return_value = mock_client await dispatcher.dispatch(_make_event(), route_target) mock_post.assert_called_once() @@ -128,169 +184,340 @@ async def test_dispatch_posts_to_agent_url(self, config, memory, route_target, s assert call_url == "http://localhost:8001/task" @pytest.mark.asyncio - async def test_dispatch_injects_memory_summary_into_task( - self, config, memory, route_target, skip_decision, mocker + async def test_dispatch_nudge_body_contains_task_id( + self, config, memory, task_queue, route_target, mocker ) -> None: - """dispatch() fetches and injects the memory summary before sending the task.""" - mocker.patch("foreman.executor.Github") - memory.upsert_memory_summary("owner/repo", 42, "Prior: labeled as bug.") - dispatcher = Dispatcher(config=config, memory=memory) - - posted_body: dict = {} - - async def capture_post(url, **kwargs): - posted_body.update(kwargs.get("json", {})) - resp = MagicMock() - resp.status_code = 200 - resp.json.return_value = skip_decision.model_dump() - return resp + """dispatch() nudge body is {"task_id": } — not the full TaskMessage.""" + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) + mock_post = AsyncMock(return_value=MagicMock(status_code=202)) - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: + with 
patch("foreman.server.httpx.AsyncClient") as mock_cls: mock_client = AsyncMock() mock_client.__aenter__ = AsyncMock(return_value=mock_client) mock_client.__aexit__ = AsyncMock(return_value=None) - mock_client.post = capture_post - mock_client_cls.return_value = mock_client - - await dispatcher.dispatch(_make_event(issue_number=42), route_target) + mock_client.post = mock_post + mock_cls.return_value = mock_client + await dispatcher.dispatch(_make_event(), route_target) - assert posted_body["context"]["memory_summary"] == "Prior: labeled as bug." + nudge_json = mock_post.call_args[1]["json"] + assert set(nudge_json.keys()) == {"task_id"} + assert nudge_json["task_id"] # non-empty @pytest.mark.asyncio - async def test_dispatch_executes_actions_from_decision( - self, config, memory, route_target, label_decision, mocker + async def test_dispatch_does_not_parse_decision_from_agent( + self, config, memory, task_queue, route_target, mocker ) -> None: - """dispatch() calls the executor with the returned DecisionMessage.""" - mock_gh_cls = mocker.patch("foreman.executor.Github") - mock_issue = MagicMock() - mock_gh_cls.return_value.get_repo.return_value.get_issue.return_value = mock_issue - dispatcher = Dispatcher(config=config, memory=memory) - + """dispatch() does not parse a DecisionMessage from the agent response.""" + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) + + # Agent returns a full DecisionMessage body — dispatch() must ignore it + decision_body = DecisionMessage( + task_id="task-001", + decision=DecisionType.skip, + rationale="Ignore me.", + actions=[], + ).model_dump() mock_response = MagicMock() mock_response.status_code = 200 - mock_response.json.return_value = label_decision.model_dump() + mock_response.json.return_value = decision_body - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: + with patch("foreman.server.httpx.AsyncClient") as mock_cls: mock_client = AsyncMock() mock_client.__aenter__ = 
AsyncMock(return_value=mock_client) mock_client.__aexit__ = AsyncMock(return_value=None) mock_client.post = AsyncMock(return_value=mock_response) - mock_client_cls.return_value = mock_client - + mock_cls.return_value = mock_client + # Must not raise even though we're not parsing the response await dispatcher.dispatch(_make_event(), route_target) - mock_issue.add_to_labels.assert_called_once_with("bug") + # Task is in queue — executor was NOT called from dispatch + claimed = task_queue.claim_next("http://localhost:8001") + assert claimed is not None + + +# --------------------------------------------------------------------------- +# Dispatch: nudge errors are swallowed +# --------------------------------------------------------------------------- + + +class TestDispatchNudgeErrors: + """dispatch() swallows nudge errors; the enqueue still happens.""" @pytest.mark.asyncio - async def test_dispatch_updates_memory_summary_after_decision( - self, config, memory, route_target, label_decision, mocker + async def test_nudge_connection_error_is_logged_and_swallowed( + self, config, memory, task_queue, route_target, mocker ) -> None: - """dispatch() writes a summary to memory after executing a decision.""" - mocker.patch("foreman.executor.Github") - dispatcher = Dispatcher(config=config, memory=memory) + """A network error on the nudge POST does not raise from dispatch().""" + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.json.return_value = label_decision.model_dump() + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client(post_side_effect=httpx.ConnectError("refused")) + # Must not raise + await dispatcher.dispatch(_make_event(), route_target) - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: - mock_client = AsyncMock() - mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ 
= AsyncMock(return_value=None) - mock_client.post = AsyncMock(return_value=mock_response) - mock_client_cls.return_value = mock_client + @pytest.mark.asyncio + async def test_task_is_enqueued_even_when_nudge_fails( + self, config, memory, task_queue, route_target, mocker + ) -> None: + """Task is in the queue even if the nudge POST throws.""" + dispatcher = _make_dispatcher(config, memory, task_queue, mocker) - await dispatcher.dispatch(_make_event(issue_number=42), route_target) + with patch("foreman.server.httpx.AsyncClient") as mock_cls: + mock_cls.return_value = _mock_async_client(post_side_effect=httpx.ConnectError("refused")) + await dispatcher.dispatch(_make_event(), route_target) - summary = memory.get_memory_summary("owner/repo", 42) - assert summary is not None + claimed = task_queue.claim_next("http://localhost:8001") + assert claimed is not None # --------------------------------------------------------------------------- -# Dispatch: agent HTTP errors +# Background loops: drain # --------------------------------------------------------------------------- -class TestDispatchAgentErrors: - """dispatch() handles non-200 agent responses gracefully.""" +def _make_task_in_queue(task_queue: TaskQueue, agent_url: str = "http://agent") -> tuple: + """Enqueue, claim, and complete a task; return (task_msg, decision_msg).""" + from foreman.protocol import LLMBackendRef, TaskContext, TaskMessage + + task_msg = TaskMessage( + task_id="drain-task-001", + type="issue.triage", + repo="owner/repo", + payload={"number": 42, "title": "Crash", "body": ""}, + context=TaskContext( + llm_backend=LLMBackendRef(provider="anthropic", model="claude-sonnet-4-6"), + ), + ) + decision_msg = DecisionMessage( + task_id="drain-task-001", + decision=DecisionType.label_and_respond, + rationale="Bug confirmed.", + actions=[ActionItem(type="add_label", label="bug")], + ) + task_queue.enqueue(task_msg, agent_url=agent_url) + task_queue.claim_next(agent_url) + 
task_queue.complete(task_msg.task_id, decision_msg) + return task_msg, decision_msg + + +class TestDrainLoop: + """_drain_loop() drains completed tasks and calls executor + memory.""" @pytest.mark.asyncio - async def test_non_200_response_is_logged_and_skipped(self, config, memory, route_target, mocker) -> None: - """A non-200 response from the agent logs and does not raise.""" - mocker.patch("foreman.executor.Github") - dispatcher = Dispatcher(config=config, memory=memory) + async def test_drain_loop_calls_executor_for_completed_task( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, mocker + ) -> None: + """drain_loop calls executor.execute() for each completed task.""" + mock_executor = MagicMock() + _make_task_in_queue(task_queue) - mock_response = MagicMock() - mock_response.status_code = 500 - mock_response.text = "Internal Server Error" + drain_event = asyncio.Event() + drain_event.set() - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: - mock_client = AsyncMock() - mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ = AsyncMock(return_value=None) - mock_client.post = AsyncMock(return_value=mock_response) - mock_client_cls.return_value = mock_client + loop_task = asyncio.create_task(_drain_loop(task_queue, mock_executor, memory, config, drain_event)) + await asyncio.sleep(0.01) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task - # Should not raise - await dispatcher.dispatch(_make_event(), route_target) + mock_executor.execute.assert_called_once() + call_kwargs = mock_executor.execute.call_args[1] + assert call_kwargs["repo"] == "owner/repo" + assert call_kwargs["issue_number"] == 42 @pytest.mark.asyncio - async def test_connection_error_is_logged_and_skipped(self, config, memory, route_target, mocker) -> None: - """A network error posting to the agent logs and does not raise.""" - mocker.patch("foreman.executor.Github") - dispatcher = 
Dispatcher(config=config, memory=memory) + async def test_drain_loop_updates_memory_for_completed_task( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue, mocker + ) -> None: + """drain_loop calls memory.upsert_memory_summary() for each completed task.""" + mock_executor = MagicMock() + _make_task_in_queue(task_queue) - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: - mock_client = AsyncMock() - mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ = AsyncMock(return_value=None) - mock_client.post = AsyncMock(side_effect=httpx.ConnectError("refused")) - mock_client_cls.return_value = mock_client + drain_event = asyncio.Event() + drain_event.set() - # Should not raise - await dispatcher.dispatch(_make_event(), route_target) + loop_task = asyncio.create_task(_drain_loop(task_queue, mock_executor, memory, config, drain_event)) + await asyncio.sleep(0.01) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task + summary = memory.get_memory_summary("owner/repo", 42) + assert summary is not None + assert "label_and_respond" in summary -# --------------------------------------------------------------------------- -# Dispatch: concurrency lock -# --------------------------------------------------------------------------- + @pytest.mark.asyncio + async def test_drain_loop_wakes_on_drain_event(self, config: ForemanConfig, memory: MemoryStore, mocker) -> None: + """drain_loop wakes immediately when drain_event is set.""" + mock_task_queue = MagicMock() + mock_task_queue.drain_completed.return_value = [] + mock_executor = MagicMock() + + drain_event = asyncio.Event() + loop_task = asyncio.create_task(_drain_loop(mock_task_queue, mock_executor, memory, config, drain_event)) + drain_event.set() + await asyncio.sleep(0.01) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task -class TestDispatchConcurrencyLock: - """dispatch() does not run concurrent tasks 
to the same agent URL.""" + mock_task_queue.drain_completed.assert_called() @pytest.mark.asyncio - async def test_second_dispatch_to_same_url_waits( - self, config, memory, route_target, skip_decision, mocker - ) -> None: - """A second concurrent dispatch to the same URL is serialised.""" - import asyncio + async def test_drain_loop_cancelled_cleanly(self, config: ForemanConfig, memory: MemoryStore, mocker) -> None: + """drain_loop raises no unhandled error when cancelled.""" + mock_task_queue = MagicMock() + mock_task_queue.drain_completed.return_value = [] + mock_executor = MagicMock() + drain_event = asyncio.Event() + + loop_task = asyncio.create_task(_drain_loop(mock_task_queue, mock_executor, memory, config, drain_event)) + await asyncio.sleep(0) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task - mocker.patch("foreman.executor.Github") - dispatcher = Dispatcher(config=config, memory=memory) + @pytest.mark.asyncio + async def test_drain_loop_survives_executor_exception( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue + ) -> None: + """An executor exception is caught; the loop keeps running and task stays completed.""" + mock_executor = MagicMock() + mock_executor.execute.side_effect = RuntimeError("GitHub API error") + _make_task_in_queue(task_queue) - call_order: list[str] = [] + drain_event = asyncio.Event() + drain_event.set() - async def slow_post(url, **kwargs): - call_order.append("start") - await asyncio.sleep(0) # yield to event loop - call_order.append("end") - resp = MagicMock() - resp.status_code = 200 - resp.json.return_value = skip_decision.model_dump() - return resp + loop_task = asyncio.create_task(_drain_loop(task_queue, mock_executor, memory, config, drain_event)) + await asyncio.sleep(0.02) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task - with patch("foreman.server.httpx.AsyncClient") as mock_client_cls: - mock_client = AsyncMock() - 
mock_client.__aenter__ = AsyncMock(return_value=mock_client) - mock_client.__aexit__ = AsyncMock(return_value=None) - mock_client.post = slow_post - mock_client_cls.return_value = mock_client + # Task stays 'completed' — mark_done was not called because executor failed + row = task_queue._conn.execute("SELECT status FROM task_queue WHERE task_id = 'drain-task-001'").fetchone() + assert row[0] == "completed" - await asyncio.gather( - dispatcher.dispatch(_make_event(), route_target), - dispatcher.dispatch(_make_event(issue_number=99), route_target), + @pytest.mark.asyncio + async def test_drain_loop_processes_remaining_tasks_after_exception( + self, config: ForemanConfig, memory: MemoryStore, task_queue: TaskQueue + ) -> None: + """An executor exception on one task does not skip other tasks in the same drain batch.""" + from foreman.protocol import DecisionType, LLMBackendRef, TaskContext, TaskMessage + + for suffix in ("-A", "-B"): + task = TaskMessage( + task_id=f"drain-task{suffix}", + type="issue.triage", + repo="owner/repo", + payload={"number": 1}, + context=TaskContext(llm_backend=LLMBackendRef(provider="anthropic", model="claude-sonnet-4-6")), + ) + decision = DecisionMessage( + task_id=f"drain-task{suffix}", + decision=DecisionType.skip, + rationale="r", + actions=[], ) + task_queue.enqueue(task, agent_url="http://agent") + task_queue.claim_next(agent_url="http://agent") + task_queue.complete(task.task_id, decision) + + # Fail on first call, succeed on second + call_count = 0 + + def side_effect(*args, **kwargs) -> None: + nonlocal call_count + call_count += 1 + if call_count == 1: + raise RuntimeError("first task fails") + + mock_executor = MagicMock() + mock_executor.execute.side_effect = side_effect + + drain_event = asyncio.Event() + drain_event.set() + + loop_task = asyncio.create_task(_drain_loop(task_queue, mock_executor, memory, config, drain_event)) + await asyncio.sleep(0.02) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await 
loop_task - # Serialised: first task fully completes before second starts - assert call_order == ["start", "end", "start", "end"] + # One task failed (still 'completed'), one succeeded ('done') + statuses = dict(task_queue._conn.execute("SELECT task_id, status FROM task_queue").fetchall()) + completed_count = sum(1 for s in statuses.values() if s == "completed") + done_count = sum(1 for s in statuses.values() if s == "done") + assert completed_count == 1 + assert done_count == 1 + + +# --------------------------------------------------------------------------- +# Background loops: requeue +# --------------------------------------------------------------------------- + + +class TestRequeueLoop: + """_requeue_loop() calls requeue_stale() and fail_exhausted() on each cycle.""" + + @pytest.mark.asyncio + async def test_requeue_loop_calls_requeue_and_fail_exhausted(self) -> None: + """requeue_loop calls both requeue_stale() and fail_exhausted() on each cycle.""" + from foreman.config import QueueConfig + + # Use requeue_interval_seconds=0 so asyncio.sleep(0) yields properly without mocking. 
+ config = ForemanConfig( + identity=IdentityConfig(github_token="t", github_user="b"), + llm=LLMConfig(provider="anthropic", model="claude-sonnet-4-6"), + repos=[], + queue=QueueConfig(requeue_interval_seconds=0, max_retries=3), + ) + mock_task_queue = MagicMock() + mock_task_queue.requeue_stale.return_value = 0 + mock_task_queue.fail_exhausted.return_value = 0 + + loop_task = asyncio.create_task(_requeue_loop(mock_task_queue, config)) + await asyncio.sleep(0.01) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task + + mock_task_queue.requeue_stale.assert_called() + mock_task_queue.fail_exhausted.assert_called_with(max_retries=3) + + @pytest.mark.asyncio + async def test_requeue_loop_cancelled_cleanly(self, config: ForemanConfig) -> None: + """requeue_loop raises no unhandled error when cancelled.""" + mock_task_queue = MagicMock() + + loop_task = asyncio.create_task(_requeue_loop(mock_task_queue, config)) + await asyncio.sleep(0) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task + + @pytest.mark.asyncio + async def test_requeue_loop_survives_requeue_stale_exception(self) -> None: + """A requeue-loop iteration that raises does not kill the loop.""" + from foreman.config import IdentityConfig, LLMConfig, QueueConfig + + fast_config = ForemanConfig( + identity=IdentityConfig(github_token="t", github_user="b"), + llm=LLMConfig(provider="anthropic", model="claude-sonnet-4-6"), + repos=[], + queue=QueueConfig(requeue_interval_seconds=0, max_retries=3), + ) + mock_task_queue = MagicMock() + mock_task_queue.requeue_stale.side_effect = RuntimeError("db error") + mock_task_queue.fail_exhausted.return_value = 0 + + loop_task = asyncio.create_task(_requeue_loop(mock_task_queue, fast_config)) + await asyncio.sleep(0.02) + loop_task.cancel() + with suppress(asyncio.CancelledError): + await loop_task + + # Loop ran multiple iterations without dying + assert mock_task_queue.requeue_stale.call_count >= 2 diff --git 
a/tests/test_triage_logic.py b/tests/test_triage_logic.py index 4634a57..2fffae1 100644 --- a/tests/test_triage_logic.py +++ b/tests/test_triage_logic.py @@ -8,12 +8,14 @@ import pytest -# Make the agent importable. +# Make the agent and foreman-client importable without installation. +_CLIENT_DIR = Path(__file__).parent.parent / "foreman-client" _AGENT_DIR = Path(__file__).parent.parent / "agents" / "issue-triage" / "issue_triage" -if str(_AGENT_DIR) not in sys.path: - sys.path.insert(0, str(_AGENT_DIR)) +for _dir in (_CLIENT_DIR, _AGENT_DIR): + if str(_dir) not in sys.path: + sys.path.insert(0, str(_dir)) -from agent import ActionItem, DecisionMessage, TaskContext, TaskMessage, LLMBackendRef # noqa: E402 +from foremanclient.models import ActionItem, DecisionMessage, LLMBackendRef, TaskContext, TaskMessage # noqa: E402 from prompts.triage import build_prompt, parse_llm_response, run_triage # noqa: E402 diff --git a/uv.lock b/uv.lock index aa27a02..71bae90 100644 --- a/uv.lock +++ b/uv.lock @@ -18,7 +18,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.13.3" +version = "3.13.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -29,76 +29,76 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732, upload-time = "2026-01-03T17:30:14.23Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293, upload-time = "2026-01-03T17:30:15.96Z" }, - { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533, upload-time = "2026-01-03T17:30:17.431Z" }, - { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839, upload-time = "2026-01-03T17:30:19.422Z" }, - { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932, upload-time = "2026-01-03T17:30:21.756Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906, upload-time = "2026-01-03T17:30:23.932Z" }, - { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 
1871020, upload-time = "2026-01-03T17:30:26Z" }, - { url = "https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181, upload-time = "2026-01-03T17:30:27.554Z" }, - { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794, upload-time = "2026-01-03T17:30:29.254Z" }, - { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900, upload-time = "2026-01-03T17:30:31.033Z" }, - { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239, upload-time = "2026-01-03T17:30:32.703Z" }, - { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527, upload-time = "2026-01-03T17:30:34.695Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489, upload-time = 
"2026-01-03T17:30:36.864Z" }, - { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852, upload-time = "2026-01-03T17:30:39.433Z" }, - { url = "https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379, upload-time = "2026-01-03T17:30:41.081Z" }, - { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253, upload-time = "2026-01-03T17:30:42.644Z" }, - { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407, upload-time = "2026-01-03T17:30:44.195Z" }, - { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, - { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, - { url = 
"https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, - { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, - { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, - { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, - { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, - { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, - { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, - { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, - { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, - { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 
1763673, upload-time = "2026-01-03T17:31:10.676Z" }, - { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, - { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, - { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, - { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" }, - { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" }, - { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" }, - { url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" }, - { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" }, - { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" }, - { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" }, - { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" }, - { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" }, - { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" }, - { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = 
"2026-01-03T17:31:44.984Z" }, - { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" }, - { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" }, - { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" }, - { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" }, - { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" }, - { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" }, - { url = 
"https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" }, - { url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" }, - { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" }, - { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" }, - { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" }, - { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" }, - { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" }, - { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" }, - { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" }, - { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" }, - { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" }, - { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = 
"2026-01-03T17:32:22.215Z" }, - { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/45/4a/064321452809dae953c1ed6e017504e72551a26b6f5708a5a80e4bf556ff/aiohttp-3.13.4.tar.gz", hash = "sha256:d97a6d09c66087890c2ab5d49069e1e570583f7ac0314ecf98294c1b6aaebd38", size = 7859748, upload-time = "2026-03-28T17:19:40.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/bd/ede278648914cabbabfdf95e436679b5d4156e417896a9b9f4587169e376/aiohttp-3.13.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ee62d4471ce86b108b19c3364db4b91180d13fe3510144872d6bad5401957360", size = 752158, upload-time = "2026-03-28T17:16:06.901Z" }, + { url = "https://files.pythonhosted.org/packages/90/de/581c053253c07b480b03785196ca5335e3c606a37dc73e95f6527f1591fe/aiohttp-3.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c0fd8f41b54b58636402eb493afd512c23580456f022c1ba2db0f810c959ed0d", size = 501037, upload-time = "2026-03-28T17:16:08.82Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f9/a5ede193c08f13cc42c0a5b50d1e246ecee9115e4cf6e900d8dbd8fd6acb/aiohttp-3.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4baa48ce49efd82d6b1a0be12d6a36b35e5594d1dd42f8bfba96ea9f8678b88c", size = 501556, upload-time = "2026-03-28T17:16:10.63Z" }, + { url = "https://files.pythonhosted.org/packages/d6/10/88ff67cd48a6ec36335b63a640abe86135791544863e0cfe1f065d6cef7a/aiohttp-3.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d738ebab9f71ee652d9dbd0211057690022201b11197f9a7324fd4dba128aa97", size = 1757314, upload-time = "2026-03-28T17:16:12.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/15/fdb90a5cf5a1f52845c276e76298c75fbbcc0ac2b4a86551906d54529965/aiohttp-3.13.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0ce692c3468fa831af7dceed52edf51ac348cebfc8d3feb935927b63bd3e8576", size = 1731819, upload-time = "2026-03-28T17:16:14.558Z" }, + { url = "https://files.pythonhosted.org/packages/ec/df/28146785a007f7820416be05d4f28cc207493efd1e8c6c1068e9bdc29198/aiohttp-3.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8e08abcfe752a454d2cb89ff0c08f2d1ecd057ae3e8cc6d84638de853530ebab", size = 1793279, upload-time = "2026-03-28T17:16:16.594Z" }, + { url = "https://files.pythonhosted.org/packages/10/47/689c743abf62ea7a77774d5722f220e2c912a77d65d368b884d9779ef41b/aiohttp-3.13.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5977f701b3fff36367a11087f30ea73c212e686d41cd363c50c022d48b011d8d", size = 1891082, upload-time = "2026-03-28T17:16:18.71Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b6/f7f4f318c7e58c23b761c9b13b9a3c9b394e0f9d5d76fbc6622fa98509f6/aiohttp-3.13.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:54203e10405c06f8b6020bd1e076ae0fe6c194adcee12a5a78af3ffa3c57025e", size = 1773938, upload-time = "2026-03-28T17:16:21.125Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/f207cb3121852c989586a6fc16ff854c4fcc8651b86c5d3bd1fc83057650/aiohttp-3.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:358a6af0145bc4dda037f13167bef3cce54b132087acc4c295c739d05d16b1c3", size = 1579548, upload-time = "2026-03-28T17:16:23.588Z" }, + { url = "https://files.pythonhosted.org/packages/6c/58/e1289661a32161e24c1fe479711d783067210d266842523752869cc1d9c2/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:898ea1850656d7d61832ef06aa9846ab3ddb1621b74f46de78fbc5e1a586ba83", size = 1714669, upload-time = "2026-03-28T17:16:25.713Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/3e86d039438a74a86e6a948a9119b22540bae037d6ba317a042ae3c22711/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7bc30cceb710cf6a44e9617e43eebb6e3e43ad855a34da7b4b6a73537d8a6763", size = 1754175, upload-time = "2026-03-28T17:16:28.18Z" }, + { url = "https://files.pythonhosted.org/packages/f4/30/e717fc5df83133ba467a560b6d8ef20197037b4bb5d7075b90037de1018e/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4a31c0c587a8a038f19a4c7e60654a6c899c9de9174593a13e7cc6e15ff271f9", size = 1762049, upload-time = "2026-03-28T17:16:30.941Z" }, + { url = "https://files.pythonhosted.org/packages/e4/28/8f7a2d4492e336e40005151bdd94baf344880a4707573378579f833a64c1/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2062f675f3fe6e06d6113eb74a157fb9df58953ffed0cdb4182554b116545758", size = 1570861, upload-time = "2026-03-28T17:16:32.953Z" }, + { url = "https://files.pythonhosted.org/packages/78/45/12e1a3d0645968b1c38de4b23fdf270b8637735ea057d4f84482ff918ad9/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d1ba8afb847ff80626d5e408c1fdc99f942acc877d0702fe137015903a220a9", size = 1790003, upload-time = "2026-03-28T17:16:35.468Z" }, + { url = "https://files.pythonhosted.org/packages/eb/0f/60374e18d590de16dcb39d6ff62f39c096c1b958e6f37727b5870026ea30/aiohttp-3.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b08149419994cdd4d5eecf7fd4bc5986b5a9380285bcd01ab4c0d6bfca47b79d", size = 1737289, upload-time = "2026-03-28T17:16:38.187Z" }, + { url = "https://files.pythonhosted.org/packages/02/bf/535e58d886cfbc40a8b0013c974afad24ef7632d645bca0b678b70033a60/aiohttp-3.13.4-cp312-cp312-win32.whl", hash = "sha256:fc432f6a2c4f720180959bc19aa37259651c1a4ed8af8afc84dd41c60f15f791", size = 434185, upload-time = "2026-03-28T17:16:40.735Z" 
}, + { url = "https://files.pythonhosted.org/packages/1e/1a/d92e3325134ebfff6f4069f270d3aac770d63320bd1fcd0eca023e74d9a8/aiohttp-3.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:6148c9ae97a3e8bff9a1fc9c757fa164116f86c100468339730e717590a3fb77", size = 461285, upload-time = "2026-03-28T17:16:42.713Z" }, + { url = "https://files.pythonhosted.org/packages/e3/ac/892f4162df9b115b4758d615f32ec63d00f3084c705ff5526630887b9b42/aiohttp-3.13.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:63dd5e5b1e43b8fb1e91b79b7ceba1feba588b317d1edff385084fcc7a0a4538", size = 745744, upload-time = "2026-03-28T17:16:44.67Z" }, + { url = "https://files.pythonhosted.org/packages/97/a9/c5b87e4443a2f0ea88cb3000c93a8fdad1ee63bffc9ded8d8c8e0d66efc6/aiohttp-3.13.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:746ac3cc00b5baea424dacddea3ec2c2702f9590de27d837aa67004db1eebc6e", size = 498178, upload-time = "2026-03-28T17:16:46.766Z" }, + { url = "https://files.pythonhosted.org/packages/94/42/07e1b543a61250783650df13da8ddcdc0d0a5538b2bd15cef6e042aefc61/aiohttp-3.13.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bda8f16ea99d6a6705e5946732e48487a448be874e54a4f73d514660ff7c05d3", size = 498331, upload-time = "2026-03-28T17:16:48.9Z" }, + { url = "https://files.pythonhosted.org/packages/20/d6/492f46bf0328534124772d0cf58570acae5b286ea25006900650f69dae0e/aiohttp-3.13.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4b061e7b5f840391e3f64d0ddf672973e45c4cfff7a0feea425ea24e51530fc2", size = 1744414, upload-time = "2026-03-28T17:16:50.968Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4d/e02627b2683f68051246215d2d62b2d2f249ff7a285e7a858dc47d6b6a14/aiohttp-3.13.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b252e8d5cd66184b570d0d010de742736e8a4fab22c58299772b0c5a466d4b21", size = 1719226, upload-time = "2026-03-28T17:16:53.173Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/6c/5d0a3394dd2b9f9aeba6e1b6065d0439e4b75d41f1fb09a3ec010b43552b/aiohttp-3.13.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:20af8aad61d1803ff11152a26146d8d81c266aa8c5aa9b4504432abb965c36a0", size = 1782110, upload-time = "2026-03-28T17:16:55.362Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2d/c20791e3437700a7441a7edfb59731150322424f5aadf635602d1d326101/aiohttp-3.13.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:13a5cc924b59859ad2adb1478e31f410a7ed46e92a2a619d6d1dd1a63c1a855e", size = 1884809, upload-time = "2026-03-28T17:16:57.734Z" }, + { url = "https://files.pythonhosted.org/packages/c8/94/d99dbfbd1924a87ef643833932eb2a3d9e5eee87656efea7d78058539eff/aiohttp-3.13.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:534913dfb0a644d537aebb4123e7d466d94e3be5549205e6a31f72368980a81a", size = 1764938, upload-time = "2026-03-28T17:17:00.221Z" }, + { url = "https://files.pythonhosted.org/packages/49/61/3ce326a1538781deb89f6cf5e094e2029cd308ed1e21b2ba2278b08426f6/aiohttp-3.13.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:320e40192a2dcc1cf4b5576936e9652981ab596bf81eb309535db7e2f5b5672f", size = 1570697, upload-time = "2026-03-28T17:17:02.985Z" }, + { url = "https://files.pythonhosted.org/packages/b6/77/4ab5a546857bb3028fbaf34d6eea180267bdab022ee8b1168b1fcde4bfdd/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9e587fcfce2bcf06526a43cb705bdee21ac089096f2e271d75de9c339db3100c", size = 1702258, upload-time = "2026-03-28T17:17:05.28Z" }, + { url = "https://files.pythonhosted.org/packages/79/63/d8f29021e39bc5af8e5d5e9da1b07976fb9846487a784e11e4f4eeda4666/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9eb9c2eea7278206b5c6c1441fdd9dc420c278ead3f3b2cc87f9b693698cc500", size = 1740287, upload-time 
= "2026-03-28T17:17:07.712Z" }, + { url = "https://files.pythonhosted.org/packages/55/3a/cbc6b3b124859a11bc8055d3682c26999b393531ef926754a3445b99dfef/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:29be00c51972b04bf9d5c8f2d7f7314f48f96070ca40a873a53056e652e805f7", size = 1753011, upload-time = "2026-03-28T17:17:10.053Z" }, + { url = "https://files.pythonhosted.org/packages/e0/30/836278675205d58c1368b21520eab9572457cf19afd23759216c04483048/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:90c06228a6c3a7c9f776fe4fc0b7ff647fffd3bed93779a6913c804ae00c1073", size = 1566359, upload-time = "2026-03-28T17:17:12.433Z" }, + { url = "https://files.pythonhosted.org/packages/50/b4/8032cc9b82d17e4277704ba30509eaccb39329dc18d6a35f05e424439e32/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a533ec132f05fd9a1d959e7f34184cd7d5e8511584848dab85faefbaac573069", size = 1785537, upload-time = "2026-03-28T17:17:14.721Z" }, + { url = "https://files.pythonhosted.org/packages/17/7d/5873e98230bde59f493bf1f7c3e327486a4b5653fa401144704df5d00211/aiohttp-3.13.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1c946f10f413836f82ea4cfb90200d2a59578c549f00857e03111cf45ad01ca5", size = 1740752, upload-time = "2026-03-28T17:17:17.387Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f2/13e46e0df051494d7d3c68b7f72d071f48c384c12716fc294f75d5b1a064/aiohttp-3.13.4-cp313-cp313-win32.whl", hash = "sha256:48708e2706106da6967eff5908c78ca3943f005ed6bcb75da2a7e4da94ef8c70", size = 433187, upload-time = "2026-03-28T17:17:19.523Z" }, + { url = "https://files.pythonhosted.org/packages/ea/c0/649856ee655a843c8f8664592cfccb73ac80ede6a8c8db33a25d810c12db/aiohttp-3.13.4-cp313-cp313-win_amd64.whl", hash = "sha256:74a2eb058da44fa3a877a49e2095b591d4913308bb424c418b77beb160c55ce3", size = 459778, upload-time = "2026-03-28T17:17:21.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/29/6657cc37ae04cacc2dbf53fb730a06b6091cc4cbe745028e047c53e6d840/aiohttp-3.13.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:e0a2c961fc92abeff61d6444f2ce6ad35bb982db9fc8ff8a47455beacf454a57", size = 749363, upload-time = "2026-03-28T17:17:24.044Z" }, + { url = "https://files.pythonhosted.org/packages/90/7f/30ccdf67ca3d24b610067dc63d64dcb91e5d88e27667811640644aa4a85d/aiohttp-3.13.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:153274535985a0ff2bff1fb6c104ed547cec898a09213d21b0f791a44b14d933", size = 499317, upload-time = "2026-03-28T17:17:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/93/13/e372dd4e68ad04ee25dafb050c7f98b0d91ea643f7352757e87231102555/aiohttp-3.13.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:351f3171e2458da3d731ce83f9e6b9619e325c45cbd534c7759750cabf453ad7", size = 500477, upload-time = "2026-03-28T17:17:28.279Z" }, + { url = "https://files.pythonhosted.org/packages/e5/fe/ee6298e8e586096fb6f5eddd31393d8544f33ae0792c71ecbb4c2bef98ac/aiohttp-3.13.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f989ac8bc5595ff761a5ccd32bdb0768a117f36dd1504b1c2c074ed5d3f4df9c", size = 1737227, upload-time = "2026-03-28T17:17:30.587Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b9/a7a0463a09e1a3fe35100f74324f23644bfc3383ac5fd5effe0722a5f0b7/aiohttp-3.13.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d36fc1709110ec1e87a229b201dd3ddc32aa01e98e7868083a794609b081c349", size = 1694036, upload-time = "2026-03-28T17:17:33.29Z" }, + { url = "https://files.pythonhosted.org/packages/57/7c/8972ae3fb7be00a91aee6b644b2a6a909aedb2c425269a3bfd90115e6f8f/aiohttp-3.13.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42adaeea83cbdf069ab94f5103ce0787c21fb1a0153270da76b59d5578302329", size = 1786814, upload-time = 
"2026-03-28T17:17:36.035Z" }, + { url = "https://files.pythonhosted.org/packages/93/01/c81e97e85c774decbaf0d577de7d848934e8166a3a14ad9f8aa5be329d28/aiohttp-3.13.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:92deb95469928cc41fd4b42a95d8012fa6df93f6b1c0a83af0ffbc4a5e218cde", size = 1866676, upload-time = "2026-03-28T17:17:38.441Z" }, + { url = "https://files.pythonhosted.org/packages/5a/5f/5b46fe8694a639ddea2cd035bf5729e4677ea882cb251396637e2ef1590d/aiohttp-3.13.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0c0c7c07c4257ef3a1df355f840bc62d133bcdef5c1c5ba75add3c08553e2eed", size = 1740842, upload-time = "2026-03-28T17:17:40.783Z" }, + { url = "https://files.pythonhosted.org/packages/20/a2/0d4b03d011cca6b6b0acba8433193c1e484efa8d705ea58295590fe24203/aiohttp-3.13.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f062c45de8a1098cb137a1898819796a2491aec4e637a06b03f149315dff4d8f", size = 1566508, upload-time = "2026-03-28T17:17:43.235Z" }, + { url = "https://files.pythonhosted.org/packages/98/17/e689fd500da52488ec5f889effd6404dece6a59de301e380f3c64f167beb/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:76093107c531517001114f0ebdb4f46858ce818590363e3e99a4a2280334454a", size = 1700569, upload-time = "2026-03-28T17:17:46.165Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0d/66402894dbcf470ef7db99449e436105ea862c24f7ea4c95c683e635af35/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:6f6ec32162d293b82f8b63a16edc80769662fbd5ae6fbd4936d3206a2c2cc63b", size = 1707407, upload-time = "2026-03-28T17:17:48.825Z" }, + { url = "https://files.pythonhosted.org/packages/2f/eb/af0ab1a3650092cbd8e14ef29e4ab0209e1460e1c299996c3f8288b3f1ff/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5903e2db3d202a00ad9f0ec35a122c005e85d90c9836ab4cda628f01edf425e2", size = 1752214, upload-time = 
"2026-03-28T17:17:51.206Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bf/72326f8a98e4c666f292f03c385545963cc65e358835d2a7375037a97b57/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2d5bea57be7aca98dbbac8da046d99b5557c5cf4e28538c4c786313078aca09e", size = 1562162, upload-time = "2026-03-28T17:17:53.634Z" }, + { url = "https://files.pythonhosted.org/packages/67/9f/13b72435f99151dd9a5469c96b3b5f86aa29b7e785ca7f35cf5e538f74c0/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:bcf0c9902085976edc0232b75006ef38f89686901249ce14226b6877f88464fb", size = 1768904, upload-time = "2026-03-28T17:17:55.991Z" }, + { url = "https://files.pythonhosted.org/packages/18/bc/28d4970e7d5452ac7776cdb5431a1164a0d9cf8bd2fffd67b4fb463aa56d/aiohttp-3.13.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3295f98bfeed2e867cab588f2a146a9db37a85e3ae9062abf46ba062bd29165", size = 1723378, upload-time = "2026-03-28T17:17:58.348Z" }, + { url = "https://files.pythonhosted.org/packages/53/74/b32458ca1a7f34d65bdee7aef2036adbe0438123d3d53e2b083c453c24dd/aiohttp-3.13.4-cp314-cp314-win32.whl", hash = "sha256:a598a5c5767e1369d8f5b08695cab1d8160040f796c4416af76fd773d229b3c9", size = 438711, upload-time = "2026-03-28T17:18:00.728Z" }, + { url = "https://files.pythonhosted.org/packages/40/b2/54b487316c2df3e03a8f3435e9636f8a81a42a69d942164830d193beb56a/aiohttp-3.13.4-cp314-cp314-win_amd64.whl", hash = "sha256:c555db4bc7a264bead5a7d63d92d41a1122fcd39cc62a4db815f45ad46f9c2c8", size = 464977, upload-time = "2026-03-28T17:18:03.367Z" }, + { url = "https://files.pythonhosted.org/packages/47/fb/e41b63c6ce71b07a59243bb8f3b457ee0c3402a619acb9d2c0d21ef0e647/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45abbbf09a129825d13c18c7d3182fecd46d9da3cfc383756145394013604ac1", size = 781549, upload-time = "2026-03-28T17:18:05.779Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/53/532b8d28df1e17e44c4d9a9368b78dcb6bf0b51037522136eced13afa9e8/aiohttp-3.13.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:74c80b2bc2c2adb7b3d1941b2b60701ee2af8296fc8aad8b8bc48bc25767266c", size = 514383, upload-time = "2026-03-28T17:18:08.096Z" }, + { url = "https://files.pythonhosted.org/packages/1b/1f/62e5d400603e8468cd635812d99cb81cfdc08127a3dc474c647615f31339/aiohttp-3.13.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c97989ae40a9746650fa196894f317dafc12227c808c774929dda0ff873a5954", size = 518304, upload-time = "2026-03-28T17:18:10.642Z" }, + { url = "https://files.pythonhosted.org/packages/90/57/2326b37b10896447e3c6e0cbef4fe2486d30913639a5cfd1332b5d870f82/aiohttp-3.13.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dae86be9811493f9990ef44fff1685f5c1a3192e9061a71a109d527944eed551", size = 1893433, upload-time = "2026-03-28T17:18:13.121Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b4/a24d82112c304afdb650167ef2fe190957d81cbddac7460bedd245f765aa/aiohttp-3.13.4-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1db491abe852ca2fa6cc48a3341985b0174b3741838e1341b82ac82c8bd9e871", size = 1755901, upload-time = "2026-03-28T17:18:16.21Z" }, + { url = "https://files.pythonhosted.org/packages/9e/2d/0883ef9d878d7846287f036c162a951968f22aabeef3ac97b0bea6f76d5d/aiohttp-3.13.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0e5d701c0aad02a7dce72eef6b93226cf3734330f1a31d69ebbf69f33b86666e", size = 1876093, upload-time = "2026-03-28T17:18:18.703Z" }, + { url = "https://files.pythonhosted.org/packages/ad/52/9204bb59c014869b71971addad6778f005daa72a96eed652c496789d7468/aiohttp-3.13.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8ac32a189081ae0a10ba18993f10f338ec94341f0d5df8fff348043962f3c6f8", size = 
1970815, upload-time = "2026-03-28T17:18:21.858Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b5/e4eb20275a866dde0f570f411b36c6b48f7b53edfe4f4071aa1b0728098a/aiohttp-3.13.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98e968cdaba43e45c73c3f306fca418c8009a957733bac85937c9f9cf3f4de27", size = 1816223, upload-time = "2026-03-28T17:18:24.729Z" }, + { url = "https://files.pythonhosted.org/packages/d8/23/e98075c5bb146aa61a1239ee1ac7714c85e814838d6cebbe37d3fe19214a/aiohttp-3.13.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca114790c9144c335d538852612d3e43ea0f075288f4849cf4b05d6cd2238ce7", size = 1649145, upload-time = "2026-03-28T17:18:27.269Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c1/7bad8be33bb06c2bb224b6468874346026092762cbec388c3bdb65a368ee/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ea2e071661ba9cfe11eabbc81ac5376eaeb3061f6e72ec4cc86d7cdd1ffbdbbb", size = 1816562, upload-time = "2026-03-28T17:18:29.847Z" }, + { url = "https://files.pythonhosted.org/packages/5c/10/c00323348695e9a5e316825969c88463dcc24c7e9d443244b8a2c9cf2eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:34e89912b6c20e0fd80e07fa401fd218a410aa1ce9f1c2f1dad6db1bd0ce0927", size = 1800333, upload-time = "2026-03-28T17:18:32.269Z" }, + { url = "https://files.pythonhosted.org/packages/84/43/9b2147a1df3559f49bd723e22905b46a46c068a53adb54abdca32c4de180/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0e217cf9f6a42908c52b46e42c568bd57adc39c9286ced31aaace614b6087965", size = 1820617, upload-time = "2026-03-28T17:18:35.238Z" }, + { url = "https://files.pythonhosted.org/packages/a9/7f/b3481a81e7a586d02e99387b18c6dafff41285f6efd3daa2124c01f87eae/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:0c296f1221e21ba979f5ac1964c3b78cfde15c5c5f855ffd2caab337e9cd9182", size = 1643417, upload-time = 
"2026-03-28T17:18:37.949Z" }, + { url = "https://files.pythonhosted.org/packages/8f/72/07181226bc99ce1124e0f89280f5221a82d3ae6a6d9d1973ce429d48e52b/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d99a9d168ebaffb74f36d011750e490085ac418f4db926cce3989c8fe6cb6b1b", size = 1849286, upload-time = "2026-03-28T17:18:40.534Z" }, + { url = "https://files.pythonhosted.org/packages/1a/e6/1b3566e103eca6da5be4ae6713e112a053725c584e96574caf117568ffef/aiohttp-3.13.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cb19177205d93b881f3f89e6081593676043a6828f59c78c17a0fd6c1fbed2ba", size = 1782635, upload-time = "2026-03-28T17:18:43.073Z" }, + { url = "https://files.pythonhosted.org/packages/37/58/1b11c71904b8d079eb0c39fe664180dd1e14bebe5608e235d8bfbadc8929/aiohttp-3.13.4-cp314-cp314t-win32.whl", hash = "sha256:c606aa5656dab6552e52ca368e43869c916338346bfaf6304e15c58fb113ea30", size = 472537, upload-time = "2026-03-28T17:18:46.286Z" }, + { url = "https://files.pythonhosted.org/packages/bc/8f/87c56a1a1977d7dddea5b31e12189665a140fdb48a71e9038ff90bb564ec/aiohttp-3.13.4-cp314-cp314t-win_amd64.whl", hash = "sha256:014dcc10ec8ab8db681f0d68e939d1e9286a5aa2b993cbbdb0db130853e02144", size = 506381, upload-time = "2026-03-28T17:18:48.74Z" }, ] [[package]] @@ -612,7 +612,7 @@ standard = [ [[package]] name = "fastapi-cloud-cli" -version = "0.17.0" +version = "0.17.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastar" }, @@ -624,9 +624,9 @@ dependencies = [ { name = "typer" }, { name = "uvicorn", extra = ["standard"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/79/66567c39c5fab6dbebf9e40b3a3fcb0e2ec359517c87a67434c76b06e60b/fastapi_cloud_cli-0.17.0.tar.gz", hash = "sha256:2b6c241b63427023bd1e23b3251f23234aba4b05428b245a050e92db1389823c", size = 47276, upload-time = "2026-04-15T13:17:56.402Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/96/57/cee8e91b83f39e75ae5562a2237261442a8179dcb3b631c7398113157398/fastapi_cloud_cli-0.17.1.tar.gz", hash = "sha256:0baece208fa88063bec46dccb5fb512f3199162092165e57654b44e64adbc44d", size = 47409, upload-time = "2026-04-27T13:38:07.094Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/31/fa442466bacadffec3d6611509d6ea391b6ca01b6ee0d4af835bfdea3483/fastapi_cloud_cli-0.17.0-py3-none-any.whl", hash = "sha256:b496e6998f037f572ab06a233ce257828b4c701488ce500b5c9d725e970a7cb1", size = 33936, upload-time = "2026-04-15T13:17:55.112Z" }, + { url = "https://files.pythonhosted.org/packages/e7/a0/e252b68cf155409afabea037ab2971f41509481838847f6503fe890884ea/fastapi_cloud_cli-0.17.1-py3-none-any.whl", hash = "sha256:325e0199bdac7cb86f5df4f4a1d2070054095588088ef7b923a60cec458dcd63", size = 34046, upload-time = "2026-04-27T13:38:08.319Z" }, ] [[package]] @@ -942,11 +942,11 @@ wheels = [ [[package]] name = "fsspec" -version = "2026.3.0" +version = "2026.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/cf/b50ddf667c15276a9ab15a70ef5f257564de271957933ffea49d2cdbcdfb/fsspec-2026.3.0.tar.gz", hash = "sha256:1ee6a0e28677557f8c2f994e3eea77db6392b4de9cd1f5d7a9e87a0ae9d01b41", size = 313547, upload-time = "2026-03-27T19:11:14.892Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/8d/1c51c094345df128ca4a990d633fe1a0ff28726c9e6b3c41ba65087bba1d/fsspec-2026.4.0.tar.gz", hash = "sha256:301d8ac70ae90ef3ad05dcf94d6c3754a097f9b5fe4667d2787aa359ec7df7e4", size = 312760, upload-time = "2026-04-29T20:42:38.635Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/1f/5f4a3cd9e4440e9d9bc78ad0a91a1c8d46b4d429d5239ebe6793c9fe5c41/fsspec-2026.3.0-py3-none-any.whl", hash = "sha256:d2ceafaad1b3457968ed14efa28798162f1638dbb5d2a6868a2db002a5ee39a4", size = 202595, upload-time = "2026-03-27T19:11:13.595Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/0c/043d5e551459da400957a1395e0febbf771446ff34291afcbe3d8be2a279/fsspec-2026.4.0-py3-none-any.whl", hash = "sha256:11ef7bb35dab8a394fde6e608221d5cf3e8499401c249bebaeaad760a1a8dec2", size = 203402, upload-time = "2026-04-29T20:42:36.842Z" }, ] [[package]] @@ -993,14 +993,14 @@ wheels = [ [[package]] name = "gitpython" -version = "3.1.47" +version = "3.1.49" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "gitdb" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c1/bd/50db468e9b1310529a19fce651b3b0e753b5c07954d486cba31bbee9a5d5/gitpython-3.1.47.tar.gz", hash = "sha256:dba27f922bd2b42cb54c87a8ab3cb6beb6bf07f3d564e21ac848913a05a8a3cd", size = 216978, upload-time = "2026-04-22T02:44:44.059Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/63/210aaa302d6a0a78daa67c5c15bbac2cad361722841278b0209b6da20855/gitpython-3.1.49.tar.gz", hash = "sha256:42f9399c9eb33fc581014bedd76049dfbaf6375aa2a5754575966387280315e1", size = 219367, upload-time = "2026-04-29T00:31:20.478Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/c5/a1bc0996af85757903cf2bf444a7824e68e0035ce63fb41d6f76f9def68b/gitpython-3.1.47-py3-none-any.whl", hash = "sha256:489f590edfd6d20571b2c0e72c6a6ac6915ee8b8cd04572330e3842207a78905", size = 209547, upload-time = "2026-04-22T02:44:41.271Z" }, + { url = "https://files.pythonhosted.org/packages/fd/6f/b842bfa6f21d6f87c57f9abf7194225e55279d96d869775e19e9f7236fc5/gitpython-3.1.49-py3-none-any.whl", hash = "sha256:024b0422d7f84d15cd794844e029ffebd4c5d42a7eb9b936b458697ef550a02c", size = 212190, upload-time = "2026-04-29T00:31:18.412Z" }, ] [[package]] @@ -1190,7 +1190,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.12.0" +version = "1.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -1203,9 +1203,9 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/56/52/1b54cb569509c725a32c1315261ac9fd0e6b91bbbf74d86fca10d3376164/huggingface_hub-1.12.0.tar.gz", hash = "sha256:7c3fe85e24b652334e5d456d7a812cd9a071e75630fac4365d9165ab5e4a34b6", size = 763091, upload-time = "2026-04-24T13:32:08.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/ff/ec7ed2eb43bd7ce8bb2233d109cc235c3e807ffe5e469dc09db261fac05e/huggingface_hub-1.13.0.tar.gz", hash = "sha256:f6df2dac5abe82ce2fe05873d10d5ff47bc677d616a2f521f4ee26db9415d9d0", size = 781788, upload-time = "2026-04-30T11:57:33.858Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/2b/ef03ddb96bd1123503c2bd6932001020292deea649e9bf4caa2cb65a85bf/huggingface_hub-1.12.0-py3-none-any.whl", hash = "sha256:d74939969585ee35748bd66de09baf84099d461bda7287cd9043bfb99b0e424d", size = 646806, upload-time = "2026-04-24T13:32:06.717Z" }, + { url = "https://files.pythonhosted.org/packages/93/db/4b1cdae9460ae1f3ca020cd767f013430ce23eb1d9c890ae3a0609b38d26/huggingface_hub-1.13.0-py3-none-any.whl", hash = "sha256:e942cb50d6a08dd5306688b1ac05bda157fd2fcc88b63dae405f7bd0d3234005", size = 660643, upload-time = "2026-04-30T11:57:31.802Z" }, ] [[package]] @@ -1360,7 +1360,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.83.13" +version = "1.83.14" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -1376,9 +1376,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1f/ac/6cc660c476e1cb925055eb7d032b7774de71fce28b9dfec652fc277397f8/litellm-1.83.13.tar.gz", hash = "sha256:803b53077cd678bb78d40f1a971920a8cd06073520c756e3d2bcb5c4da7bd352", size = 14785817, upload-time = "2026-04-24T01:03:03.096Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8d/7c/c095649380adc96c8630273c1768c2ad1e74aa2ee1dd8dd05d218a60569f/litellm-1.83.14.tar.gz", hash = 
"sha256:24aef9b47cdc424c833e32f3727f411741c690832cd1fe4405e0077144fe09c9", size = 14836599, upload-time = "2026-04-26T03:16:10.176Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/57/99d553839018ba4c1b94fbe540f218fefb725fba79ecc2abf462cd39d719/litellm-1.83.13-py3-none-any.whl", hash = "sha256:14be9f3b4a9e207372f275d75e6f174a3e717352ccca6c63707c25fd6e23cac6", size = 16406883, upload-time = "2026-04-24T01:02:58.7Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5c/1b5691575420135e90578543b2bf219497caa33cfd0af64cb38f30288450/litellm-1.83.14-py3-none-any.whl", hash = "sha256:92b11ba2a32cf80707ddf388d18526696c7999a21b418c5e3b6eda1243d2cfdb", size = 16457054, upload-time = "2026-04-26T03:16:05.72Z" }, ] [[package]] @@ -1952,11 +1952,11 @@ wheels = [ [[package]] name = "pathspec" -version = "1.1.0" +version = "1.1.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2e/17/9c3094b822982b9f1ea666d8580ce59000f61f87c1663556fb72031ad9ec/pathspec-1.1.0.tar.gz", hash = "sha256:f5d7c555da02fd8dde3e4a2354b6aba817a89112fa8f333f7917a2a4834dd080", size = 133918, upload-time = "2026-04-23T01:46:22.298Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/82/42f767fc1c1143d6fd36efb827202a2d997a375e160a71eb2888a925aac1/pathspec-1.1.1.tar.gz", hash = "sha256:17db5ecd524104a120e173814c90367a96a98d07c45b2e10c2f3919fff91bf5a", size = 135180, upload-time = "2026-04-27T01:46:08.907Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fa/c9/8eed0486f074e9f1ca7f8ce5ad663e65f12fdab344028d658fa1b03d35e0/pathspec-1.1.0-py3-none-any.whl", hash = "sha256:574b128f7456bd899045ccd142dd446af7e6cfd0072d63ad73fbc55fbb4aaa42", size = 56264, upload-time = "2026-04-23T01:46:20.606Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d9/7fb5aa316bc299258e68c73ba3bddbc499654a07f151cba08f6153988714/pathspec-1.1.1-py3-none-any.whl", hash = 
"sha256:a00ce642f577bf7f473932318056212bc4f8bfdf53128c78bbd5af0b9b20b189", size = 57328, upload-time = "2026-04-27T01:46:07.06Z" }, ] [[package]] @@ -2336,14 +2336,14 @@ wheels = [ [[package]] name = "pytest-agent-digest" -version = "0.3.1" +version = "0.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/f8/3256526960418ce2108807bdb2601298f8d64b39ca69382a6e11e53fd153/pytest_agent_digest-0.3.1.tar.gz", hash = "sha256:7c3f6f886d5cccc7350409b31a46ab3a0930d93c8e5b1c4341b919ca903e1072", size = 143017, upload-time = "2026-03-21T17:47:09.525Z" } +sdist = { url = "https://files.pythonhosted.org/packages/26/50/3422269f15c2cd2d2a77137a4e75144f7758d09833310bb81bfac7d00c55/pytest_agent_digest-0.3.2.tar.gz", hash = "sha256:b6b442869d4856797635f32001c3f03efac2d264e8a9c6b272e9b87cac40e9f1", size = 145780, upload-time = "2026-05-02T18:40:39.545Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/85/79f4cbf7ad59b62664410d59d174e908c4e607ed099b0f8c4267fe72f4f3/pytest_agent_digest-0.3.1-py3-none-any.whl", hash = "sha256:b209b755b0e9076dec0e934a3ed583474c71c900f56d115214b9f62a1cfc1ca8", size = 9356, upload-time = "2026-03-21T17:47:08.173Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f8/487b929c9b61c2a0defc44c9120673e8fd463aac726136171bdf839923b5/pytest_agent_digest-0.3.2-py3-none-any.whl", hash = "sha256:37d00e70f5d1b098be2c27694f0b3376e0eb5f4cee2c46a323de5584c97091b5", size = 10229, upload-time = "2026-05-02T18:40:40.854Z" }, ] [[package]] @@ -2424,20 +2424,20 @@ wheels = [ [[package]] name = "python-dotenv" -version = "1.0.1" +version = "1.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time 
= "2024-01-23T06:33:00.505Z" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, ] [[package]] name = "python-multipart" -version = "0.0.26" +version = "0.0.27" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" } +sdist = { url = "https://files.pythonhosted.org/packages/69/9b/f23807317a113dc36e74e75eb265a02dd1a4d9082abc3c1064acd22997c4/python_multipart-0.0.27.tar.gz", hash = "sha256:9870a6a8c5a20a5bf4f07c017bd1489006ff8836cff097b6933355ee2b49b602", size = 44043, upload-time = "2026-04-27T10:51:26.649Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/78/4126abcbdbd3c559d43e0db7f7b9173fc6befe45d39a2856cc0b8ec2a5a6/python_multipart-0.0.27-py3-none-any.whl", hash = "sha256:6fccfad17a27334bd0193681b369f476eda3409f17381a2d65aa7df3f7275645", size = 29254, upload-time = "2026-04-27T10:51:24.997Z" }, ] [[package]] @@ -3108,28 +3108,28 @@ wheels = [ [[package]] name = "uv" -version = "0.11.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9b/7d/17750123a8c8e324627534fe1ae2e7a46689db8492f1a834ab4fd229a7d8/uv-0.11.7.tar.gz", hash = "sha256:46d971489b00bdb27e0aa715e4a5cd4ef2c28ea5b6ef78f2b67bf861eb44b405", size = 4083385, upload-time = "2026-04-15T21:42:55.474Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b2/5b/2bb2ab6fe6c78c2be10852482ef0cae5f3171460a6e5e24c32c9a0843163/uv-0.11.7-py3-none-linux_armv6l.whl", hash = "sha256:f422d39530516b1dfb28bb6e90c32bb7dacd50f6a383cd6e40c1a859419fbc8c", size = 23757265, upload-time = "2026-04-15T21:43:14.494Z" }, - { url = "https://files.pythonhosted.org/packages/b2/f5/36ff27b01e60a88712628c8a5a6003b8e418883c24e084e506095844a797/uv-0.11.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8b2fe1ec6775dad10183e3fdce430a5b37b7857d49763c884f3a67eaa8ca6f8a", size = 23184529, upload-time = "2026-04-15T21:42:30.225Z" }, - { url = "https://files.pythonhosted.org/packages/8a/fa/f379be661316698f877e78f4c51e5044be0b6f390803387237ad92c4057f/uv-0.11.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:162fa961a9a081dcea6e889c79f738a5ae56507047e4672964972e33c301bea9", size = 21780167, upload-time = "2026-04-15T21:42:44.942Z" }, - { url = "https://files.pythonhosted.org/packages/f2/7f/fbed29775b0612f4f5679d3226268f1a347161abc1727b4080fb41d9f46f/uv-0.11.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:5985a15a92bd9a170fc1947abb1fbc3e9828c5a430ad85b5bed8356c20b67a71", size = 23609640, upload-time = "2026-04-15T21:42:22.57Z" }, - { 
url = "https://files.pythonhosted.org/packages/ad/de/989a69634a869a22322770120557c2d8cbba5b77ec7cfad326b4ec0f0547/uv-0.11.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:fab0bb43fbbc0ee5b5fee212078d2300c371b725faff7cf72eeaafa0bff0606b", size = 23322484, upload-time = "2026-04-15T21:43:26.52Z" }, - { url = "https://files.pythonhosted.org/packages/24/08/c1af05ea602eb4eb75d86badb6b0594cc104c3ca83ccf06d9ed4dd2186ad/uv-0.11.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23d457d6731ebdb83f1bffebe4894edab2ef43c1ec5488433c74300db4958924", size = 23326385, upload-time = "2026-04-15T21:42:41.32Z" }, - { url = "https://files.pythonhosted.org/packages/68/99/e246962da06383e992ecab55000c62a50fb36efef855ea7264fad4816bf4/uv-0.11.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d6a17507b8139b8803f445a03fd097f732ce8356b1b7b13cdb4dd8ef7f4b2e0", size = 24985751, upload-time = "2026-04-15T21:42:37.777Z" }, - { url = "https://files.pythonhosted.org/packages/45/2d/b0b68083859579ce811996c1480765ec6a2442b44c451eaef53e6218fbae/uv-0.11.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd48823ca4b505124389f49ae50626ba9f57212b9047738efc95126ed5f3844d", size = 25724160, upload-time = "2026-04-15T21:43:18.762Z" }, - { url = "https://files.pythonhosted.org/packages/4e/19/5970e89d9e458fd3c4966bbc586a685a1c0ab0a8bf334503f63fa20b925b/uv-0.11.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb91f52ee67e10d5290f2c2897e2171357f1a10966de38d83eefa93d96843b0c", size = 25028512, upload-time = "2026-04-15T21:43:02.721Z" }, - { url = "https://files.pythonhosted.org/packages/83/eb/4e1557daf6693cb446ed28185664ad6682fd98c6dbac9e433cbc35df450a/uv-0.11.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e4d5e31bea86e1b6e0f5a0f95e14e80018e6f6c0129256d2915a4b3d793644d", size = 24933975, upload-time = "2026-04-15T21:42:18.828Z" }, - { url = 
"https://files.pythonhosted.org/packages/68/55/3b517ec8297f110d6981f525cccf26f86e30883fbb9c282769cffbcdcfca/uv-0.11.7-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:ceae53b202ea92bc954759bc7c7570cdcd5c3512fce15701198c19fd2dfb8605", size = 23706403, upload-time = "2026-04-15T21:43:10.664Z" }, - { url = "https://files.pythonhosted.org/packages/dc/30/7d93a0312d60e147722967036dc8ea37baab4802784bddc22464cb707deb/uv-0.11.7-py3-none-manylinux_2_31_riscv64.musllinux_1_1_riscv64.whl", hash = "sha256:f97e9f4e4d44fb5c4dfaa05e858ef3414a96416a2e4af270ecd88a3e5fb049a9", size = 24495797, upload-time = "2026-04-15T21:42:26.538Z" }, - { url = "https://files.pythonhosted.org/packages/8c/89/d49480bdab7725d36982793857e461d471bde8e1b7f438ffccee677a7bf8/uv-0.11.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:750ee5b96959b807cf442b73dd8b55111862d63f258f896787ea5f06b68aaca9", size = 24580471, upload-time = "2026-04-15T21:42:52.871Z" }, - { url = "https://files.pythonhosted.org/packages/b6/9f/c57dc03b48be17b564e304eb9ff982890c12dfb888b1ce370788733329ab/uv-0.11.7-py3-none-musllinux_1_1_i686.whl", hash = "sha256:f394331f0507e80ee732cb3df737589de53bed999dd02a6d24682f08c2f8ac4f", size = 24113637, upload-time = "2026-04-15T21:42:34.094Z" }, - { url = "https://files.pythonhosted.org/packages/13/ba/b87e358b629a68258527e3490e73b7b148770f4d2257842dea3b7981d4e8/uv-0.11.7-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:0df59ab0c6a4b14a763e8445e1c303af9abeb53cdfa4428daf9ff9642c0a3cce", size = 25119850, upload-time = "2026-04-15T21:43:22.529Z" }, - { url = "https://files.pythonhosted.org/packages/4b/74/16d229e1d8574bcbafa6dc643ac20b70c3e581f42ac31a6f4fd53035ffe3/uv-0.11.7-py3-none-win32.whl", hash = "sha256:553e67cc766d013ce24353fecd4ea5533d2aedcfd35f9fac430e07b1d1f23ed4", size = 22918454, upload-time = "2026-04-15T21:42:58.702Z" }, - { url = "https://files.pythonhosted.org/packages/a6/1d/b73e473da616ac758b8918fb218febcc46ddf64cba9e03894dfa226b28bd/uv-0.11.7-py3-none-win_amd64.whl", 
hash = "sha256:5674dfb5944513f4b3735b05c2deba6b1b01151f46729d533d413a9a905f8c5d", size = 25447744, upload-time = "2026-04-15T21:42:48.813Z" }, - { url = "https://files.pythonhosted.org/packages/1b/bb/e6bfdea92ed270f3445a5a3c17599d041b3f2dbc5026c09e02830a03bbaf/uv-0.11.7-py3-none-win_arm64.whl", hash = "sha256:6158b7e39464f1aa1e040daa0186cae4749a78b5cd80ac769f32ca711b8976b1", size = 23941816, upload-time = "2026-04-15T21:43:06.732Z" }, +version = "0.11.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/cd/4393fecb083897e956f016d4e66d0b8a496a08fe2e03cbda32a1e91da7ee/uv-0.11.8.tar.gz", hash = "sha256:bb2cf302b8503629aab6f0090a05551e6f8cfc2d687ca059cad7ec9e11214335", size = 4098020, upload-time = "2026-04-27T13:15:31.625Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/84/dcb676a3e36a3a2b44dc2e4dfea471b8cd709025e27cce3e588b176fd899/uv-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:a53e704a780a9e78a50f5a880e99a690f84e6fb9e82610903ce26f47c271d74c", size = 23664296, upload-time = "2026-04-27T13:15:15.644Z" }, + { url = "https://files.pythonhosted.org/packages/86/05/557aa070fda7b8460bbbe1e867e8e5b80602c5b30ed77d1d94fc5acae518/uv-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d414fc3795b6f56fb6b1fa359537930924fdfe857750a144d2aedf3077be3f1d", size = 23087321, upload-time = "2026-04-27T13:15:36.193Z" }, + { url = "https://files.pythonhosted.org/packages/d5/62/82953018801a250e16b091ef4b5e95e939b2f01224363d6fc80f600b7eff/uv-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f0d402e182ab581e934c159cc9edf25ec6e08d32f29aa797980e949afefc87cd", size = 21747142, upload-time = "2026-04-27T13:15:20.4Z" }, + { url = "https://files.pythonhosted.org/packages/af/4c/477f2abe16f9a3d3c73077f15615878a303eef3760115ec946be58ecb9b2/uv-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = 
"sha256:877c9af3b3955a35ef739e5b2ba79c56dae5c4d50420a7ed908c0901e1c8c807", size = 23425861, upload-time = "2026-04-27T13:15:10.374Z" }, + { url = "https://files.pythonhosted.org/packages/2a/63/19f46193e49f0c9bf33346a4d726313871864db16e7cdd1c0a63bc112000/uv-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:8278144df8d80a83f770c264a5e79ea50791316d2a0dda869e53b3c1174142a8", size = 23215551, upload-time = "2026-04-27T13:15:38.706Z" }, + { url = "https://files.pythonhosted.org/packages/72/3e/5595b265df848a33cd060b10e8f763a46d67521ac9f6c314e8a4ad5329d7/uv-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3494ad32465f4e02259cfb104d24efe5bb8f7a782351f0354de9385415fb310", size = 23224170, upload-time = "2026-04-27T13:15:18.083Z" }, + { url = "https://files.pythonhosted.org/packages/a6/b3/6ca95e690b52542caa1dae10ede57732f90c629946ab5f027ff746f87deb/uv-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4421e27e81f85bce3bdb75986c38b5f9bfab9cdccaf3d977cf124b3f0f0b989", size = 24730048, upload-time = "2026-04-27T13:15:13.254Z" }, + { url = "https://files.pythonhosted.org/packages/ea/49/71b7322067c85a3736a22a300072b0566991fe3f95b81bed793508ff5315/uv-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91943e77fc962752d4f64ad5739219858395981078051c740b28b52963b366aa", size = 25585906, upload-time = "2026-04-27T13:15:41.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/16/4e84cd5131327fe86d4784ebfc8a983149f4e6b811476ef271fc548b29e6/uv-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:41fbba287efcc9bc9505a60549b3a223220da720eacd03be8c23d9daaafa44f4", size = 24795740, upload-time = "2026-04-27T13:15:49.842Z" }, + { url = "https://files.pythonhosted.org/packages/5b/01/df175979018743cc5ba6e2fb9dcec916868271e8d88cf0b9df8fd805a0df/uv-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d97bb2920d6cddc07faa475013461294cc09b77ec8139278416c6e54b938d037", size = 24824980, upload-time = "2026-04-27T13:15:53.506Z" }, + { url = "https://files.pythonhosted.org/packages/1c/95/93c7f595f7136fb32807442860c55d0faed2cd3d7da4b7105ed3c2535d5f/uv-0.11.8-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:fb6a755305eb1e081dfe6a8bc007dbae2d26fe75e551656ca7c9cd08fba21d26", size = 23526790, upload-time = "2026-04-27T13:15:04.955Z" }, + { url = "https://files.pythonhosted.org/packages/04/02/77430b89e172c20cc549b07a5b1dfda0c882c161b6d82781d3150a7063ac/uv-0.11.8-py3-none-manylinux_2_31_riscv64.musllinux_1_1_riscv64.whl", hash = "sha256:841ecbb38532698f73b14b49dc5f0c5e756194c7fcf6e5c6b7ed3859200fe91b", size = 24280498, upload-time = "2026-04-27T13:15:43.978Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e3/23e4a2bb91e3880e017e6116886e2d0bde14ba6aa95ddc458160ee630e7c/uv-0.11.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:b3ff2b20c1897105ebe7ed7f9b1b331c7171da029bc1e35970ce31dc086141c1", size = 24375233, upload-time = "2026-04-27T13:15:25.753Z" }, + { url = "https://files.pythonhosted.org/packages/d9/67/fb7dc17cea816a667d1be2632525aa1687566bfafd17bdac561a7a6c9484/uv-0.11.8-py3-none-musllinux_1_1_i686.whl", hash = "sha256:ad381228b0170ef9646902c7e908d4a10a7ecc3da8139450506cf70c7e7f3e80", size = 23904818, upload-time = "2026-04-27T13:15:23.21Z" }, + { url = "https://files.pythonhosted.org/packages/4b/91/b920e35f54f8c6b51f2c639e8170bb80a47a739a1442fea33a479bc93a3d/uv-0.11.8-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:0172b5215544844cd3db0fa3c73a2eb74999b3f00cd2527dde578725076d7b65", size = 25015448, upload-time = "2026-04-27T13:15:46.666Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/3771956dc1c94b8484789bb8070d91872080d0af99332b8bdec7218c2bfd/uv-0.11.8-py3-none-win32.whl", hash = "sha256:e71c1dd23cbb480f3952c3a95b4fd00f96bd618e2a94583fc9388c500af3070d", size = 22823583, upload-time = "2026-04-27T13:15:33.674Z" }, + { url = 
"https://files.pythonhosted.org/packages/f9/9b/a91a9c60dcae0e1e3da06377d38f32118a523697d461fe41bc9f117ecf59/uv-0.11.8-py3-none-win_amd64.whl", hash = "sha256:306c624c68d95dd7ea3647675323d72c1abc25f91c3e92ae4cd6f0f11b508726", size = 25407438, upload-time = "2026-04-27T13:15:28.957Z" }, + { url = "https://files.pythonhosted.org/packages/61/5d/defa29fe617e6f07d4e514089e9d36fd9f44ede869e597e39ff7d69f6917/uv-0.11.8-py3-none-win_arm64.whl", hash = "sha256:a9853456696d579f206135c9dda7227a6ed8311b8a9a0b9b2008c4ae81950efe", size = 23914243, upload-time = "2026-04-27T13:15:07.717Z" }, ] [[package]] @@ -3190,7 +3190,7 @@ wheels = [ [[package]] name = "virtualenv" -version = "21.2.4" +version = "21.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, @@ -3198,9 +3198,9 @@ dependencies = [ { name = "platformdirs" }, { name = "python-discovery" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/98/3a7e644e19cb26133488caff231be390579860bbbb3da35913c49a1d0a46/virtualenv-21.2.4.tar.gz", hash = "sha256:b294ef68192638004d72524ce7ef303e9d0cf5a44c95ce2e54a7500a6381cada", size = 5850742, upload-time = "2026-04-14T22:15:31.438Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/8b/6331f7a7fe70131c301106ec1e7cf23e2501bf7d4ca3636805801ca191bb/virtualenv-21.3.0.tar.gz", hash = "sha256:733750db978ec95c2d8eb4feadaa57091002bce404cb39ba69899cf7bd28944e", size = 7614069, upload-time = "2026-04-27T17:05:58.927Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/27/8d/edd0bd910ff803c308ee9a6b7778621af0d10252219ad9f19ef4d4982a61/virtualenv-21.2.4-py3-none-any.whl", hash = "sha256:29d21e941795206138d0f22f4e45ff7050e5da6c6472299fb7103318763861ac", size = 5831232, upload-time = "2026-04-14T22:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/4b/eb/03bfb1299d4c4510329e470f13f9a4ce793df7fcb5a2fd3510f911066f61/virtualenv-21.3.0-py3-none-any.whl", hash = 
"sha256:4d28ee41f6d9ec8f1f00cd472b9ffbcedda1b3d3b9a575b5c94a2d004fd51bd7", size = 7594690, upload-time = "2026-04-27T17:05:55.468Z" }, ] [[package]] @@ -3311,11 +3311,11 @@ wheels = [ [[package]] name = "wcwidth" -version = "0.6.0" +version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/35/a2/8e3becb46433538a38726c948d3399905a4c7cabd0df578ede5dc51f0ec2/wcwidth-0.6.0.tar.gz", hash = "sha256:cdc4e4262d6ef9a1a57e018384cbeb1208d8abbc64176027e2c2455c81313159", size = 159684, upload-time = "2026-02-06T19:19:40.919Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/ee/afaf0f85a9a18fe47a67f1e4422ed6cf1fe642f0ae0a2f81166231303c52/wcwidth-0.7.0.tar.gz", hash = "sha256:90e3a7ea092341c44b99562e75d09e4d5160fe7a3974c6fb842a101a95e7eed0", size = 182132, upload-time = "2026-05-02T16:04:12.653Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, + { url = "https://files.pythonhosted.org/packages/41/52/e465037f5375f43533d1a80b6923955201596a99142ed524d77b571a1418/wcwidth-0.7.0-py3-none-any.whl", hash = "sha256:5d69154c429a82910e241c738cd0e2976fac8a2dd47a1a805f4afed1c0f136f2", size = 110825, upload-time = "2026-05-02T16:04:11.033Z" }, ] [[package]] @@ -3533,7 +3533,7 @@ wheels = [ [[package]] name = "zensical" -version = "0.0.36" +version = "0.0.39" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -3544,20 +3544,20 @@ dependencies = [ { name = "pyyaml" }, { name = "tomli" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/e9/8d0e66ad113e702d7f5eed2cc5ad0f035cb212c49b0415553473f2da900b/zensical-0.0.36.tar.gz", hash = "sha256:32126c57fd241267e55c863f2bdd31bfe4422c376280e74e4a1036a89c0d513c", size = 
3897092, upload-time = "2026-04-23T15:37:46.892Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f2/ff/2846737502a9ae783570b32aac4f20f5232512fbf245bbf1c0398728c7ed/zensical-0.0.36-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3d42312267c4124ed67ddfd2809167bdd3ea4f71892c8a20897be98b66da8b73", size = 12515534, upload-time = "2026-04-23T15:37:07.815Z" }, - { url = "https://files.pythonhosted.org/packages/84/e9/443b561793ed6626cb46c328fd8fd916a7b18e5af5349934c5346438548c/zensical-0.0.36-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:8462c133c8da5234cd301ad3c722d52d66a0092a51b7b93e2ce12f217976b29b", size = 12384874, upload-time = "2026-04-23T15:37:11.617Z" }, - { url = "https://files.pythonhosted.org/packages/7a/f0/faecf0a5dff381ff331b7b87d385c8335ca0b7297a33d85abc3313cfa598/zensical-0.0.36-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a6dc86dc0d8488b18c6501d62b63989a538350a33173347da8b9f1f54bed2c", size = 12764889, upload-time = "2026-04-23T15:37:14.512Z" }, - { url = "https://files.pythonhosted.org/packages/b0/56/1ddee63d323d779733e5bf00e99c878f03e50b77f294711a850c1e1ceddb/zensical-0.0.36-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d31c726d7f13601a568a2a9e80592472da24657ff5428ef15c2c95bc458cb65b", size = 12705679, upload-time = "2026-04-23T15:37:18.038Z" }, - { url = "https://files.pythonhosted.org/packages/9b/61/4b264b1466251450856ed4768fa9a793f7c24172039f47f562cd899e0744/zensical-0.0.36-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a7e8b32e41784d19122cb16a0bd6fcb53852177ce689ceba1ba7a8bb20fe3a0", size = 13057470, upload-time = "2026-04-23T15:37:21.594Z" }, - { url = "https://files.pythonhosted.org/packages/17/9b/c44a1ebc2fe8daadecbd9ea41c498e545c494204e239314347fbcec51159/zensical-0.0.36-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe5d24716107edb033c2326c816b891952b98b9637c5308f5320712a2e70aac", size = 12792788, 
upload-time = "2026-04-23T15:37:24.784Z" }, - { url = "https://files.pythonhosted.org/packages/97/94/4d0e345f75f892fce029b513a26f4491b6dd39ff73c5bee3f8fbb9305e8c/zensical-0.0.36-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9ed7a54465b497d1548aeb6b38a99ac6f45c8f191a5cf2a180902af28c0cd58a", size = 12940940, upload-time = "2026-04-23T15:37:27.975Z" }, - { url = "https://files.pythonhosted.org/packages/de/2e/4612b97d8d493a6ac591ebb28a6b3a592eb4d969bbb8a92311125fe0b874/zensical-0.0.36-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:282eb4eaf7cd3bd389a4b826c1c13a30136e5c6fcfcafce26fc27cd05acc660f", size = 12980355, upload-time = "2026-04-23T15:37:30.998Z" }, - { url = "https://files.pythonhosted.org/packages/c1/90/c1a91b503aec105cdb7ccf4d466e8612c113186f090c61d795272cecce27/zensical-0.0.36-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:36d5719df268697dbcf7aa5bbea9eea353501c80b1c6c17d6c7f2c69405be9af", size = 13124220, upload-time = "2026-04-23T15:37:34.506Z" }, - { url = "https://files.pythonhosted.org/packages/ac/e0/b9ffadaff0b80498699aaf0f2bcc0b659db074fd94071520d22f035e5125/zensical-0.0.36-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7771aaf33f7d06f779e041930812fe65f5f97a6f4fbd1c7e51924ce1a27c0c66", size = 13070894, upload-time = "2026-04-23T15:37:38.092Z" }, - { url = "https://files.pythonhosted.org/packages/c5/c3/aea29875f7b89d7c79b84a30259356404bf778d42c27c36632ef19aa826c/zensical-0.0.36-cp310-abi3-win32.whl", hash = "sha256:61f1dff7c38a8d0acb054c11426c25f0a57b973703eb3d0bf1e8cc04ca54d047", size = 12084318, upload-time = "2026-04-23T15:37:41.093Z" }, - { url = "https://files.pythonhosted.org/packages/6d/fd/6d7b2088180624e3c6dd9471788ac277b9ae3091a4da1b23a191c8ed6419/zensical-0.0.36-cp310-abi3-win_amd64.whl", hash = "sha256:be08cdf13599cfa92d71563ec12058ab20f234ed5489293b83b0f29563cc588a", size = 12301398, upload-time = "2026-04-23T15:37:44.07Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/b4/b1/819add51b42b0aac618622582db631a71cc7c6d91ab4787d73a7bd9badb1/zensical-0.0.39.tar.gz", hash = "sha256:2a8713c54362adb0881e9b0514b5ad9a696324756699dedb55fa1cbf3ccc0eda", size = 3914611, upload-time = "2026-05-01T16:33:28.402Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/f0/f1103a861cd610ae6cf31c4132530b7c11b023301d58f9adcdcac52e62f9/zensical-0.0.39-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1eb9b79936f968c0746a534ac39ddd138b5e86126843f653afaa84cfa1e8add5", size = 12669448, upload-time = "2026-05-01T16:32:54.356Z" }, + { url = "https://files.pythonhosted.org/packages/56/97/e5a79bb004f834b7618388df99e609e4cee4e4876c81ac1c48991fee4780/zensical-0.0.39-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:093a4763c62ead4fe9cf7bf05857ce37290e4fa1903795b3dcb3006767d6c818", size = 12534151, upload-time = "2026-05-01T16:32:57.279Z" }, + { url = "https://files.pythonhosted.org/packages/50/d4/a02fdf9a68a0229cc96d865816df5799322e8515188ffecb29eef40538d3/zensical-0.0.39-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d86ca51a69dcadd04cc664b13c76663be9067107564276a3acf8ab90c9e0b3aa", size = 12926235, upload-time = "2026-05-01T16:33:00.174Z" }, + { url = "https://files.pythonhosted.org/packages/3d/07/c36890d69273fc6bbe61d179641ce616e6e100308298b48a16019a5bd6fd/zensical-0.0.39-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:849426b5a41b58da8cef85f7d248be780b512a88953309b9a7b4879d71a37b9c", size = 12888428, upload-time = "2026-05-01T16:33:02.716Z" }, + { url = "https://files.pythonhosted.org/packages/6f/42/beb8c456ef9926dc91b288d27674813078f75dce7b28fc17c2f97168ca5c/zensical-0.0.39-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba187d2d555da85a5cde635ef32f7bee1d3634522f25b21d58a14f41a128656c", size = 13251625, upload-time = "2026-05-01T16:33:05.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/95/b31d67aa17d79048d16fe2da47176fee02eff9a0840df3a583865c0b5048/zensical-0.0.39-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:601c3ba0fc33a8fa5f14470ee99757f9946c370dd4c46c7aacf056dbdcab1df6", size = 12960908, upload-time = "2026-05-01T16:33:08.639Z" }, + { url = "https://files.pythonhosted.org/packages/5a/8c/c01c850510931fb247a9d6b6bb0bc3a9c1e7185157551ad9baf31a640e51/zensical-0.0.39-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b275d099b53e179188897037667261c9b6c647d0829ef7dbbd40f0e9ea0a1c4b", size = 13102894, upload-time = "2026-05-01T16:33:11.747Z" }, + { url = "https://files.pythonhosted.org/packages/f0/4e/d0d72beedd002986b2932f608a0390973fd50c79e0e74636766cabe4b985/zensical-0.0.39-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:8b670c6a6d8b6d484e83f9b80387d73213549f473cadb19a12026e0edbcafc7f", size = 13159755, upload-time = "2026-05-01T16:33:14.728Z" }, + { url = "https://files.pythonhosted.org/packages/d2/f1/b0224a8307e806ac5887f7af3b4b5483f5656e952edadb0474ff72dc6373/zensical-0.0.39-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:1fe98c35748bd0b99dc45f843508a174362e77216dbea34321ec06aed2810816", size = 13310315, upload-time = "2026-05-01T16:33:17.356Z" }, + { url = "https://files.pythonhosted.org/packages/3c/c0/2c872d7e40030dd2f7b82d78f23ae2b259e5657d9712398968af4ffd542c/zensical-0.0.39-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:aafe2650e2f2814e95f79500cd121e35bb2fdda9b63255bb4ac4e530a05d3524", size = 13245269, upload-time = "2026-05-01T16:33:20.426Z" }, + { url = "https://files.pythonhosted.org/packages/9c/18/24cfab771f36576c8fa098c47dbc61fc19f35f6e456169dc18b5d7e6b024/zensical-0.0.39-cp310-abi3-win32.whl", hash = "sha256:c49f006444f356feb2b4067fcf1f0a2050c444c194e0e95bc5e1f53fe3f9655a", size = 12242128, upload-time = "2026-05-01T16:33:23.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/8b/39bb18c55aad6ca413c1a808693a835b19f4173b460865924d04189acac9/zensical-0.0.39-cp310-abi3-win_amd64.whl", hash = "sha256:c1c0befb8ba10aa16f0acdea9373da5eae8932d50bc996fdc54598e9483ab667", size = 12470551, upload-time = "2026-05-01T16:33:26.185Z" }, ] [[package]]