# NOTE: the lines that preceded this workflow were GitHub web-UI page chrome
# captured along with the file ("Skip to content", the PR title, "Workflow
# file for this run") and are not part of the workflow itself. Context
# preserved from that capture:
#   PR #1248 — test(server): regression test for context-echo on
#   dispatcher-wrapped non-AdcpError

name: CI
on:
  push:
    branches: [main, python-adcp-sdk-setup]
  pull_request:
    branches: [main]
# One in-flight run per ref: a new push cancels the previous run's jobs.
concurrency:
  group: ci-${{ github.ref }}
  cancel-in-progress: true
jobs:
  test:
    name: Test Python ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.10", "3.11", "3.12", "3.13"]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      - name: Run linter
        run: |
          ruff check src/
      - name: Run type checker
        run: |
          mypy src/adcp/
      - name: Run tests
        run: |
          pytest tests/ -v --cov=src/adcp --cov-report=term-missing

  pg-conformance:
    name: Postgres conformance tests (Postgres 16)
    runs-on: ubuntu-latest
    services:
      postgres:
        # CI-local ephemeral database. POSTGRES_HOST_AUTH_METHOD=trust
        # avoids shipping any password literal (real or placeholder) in
        # this workflow — GitHub's default CI network is already the
        # trust boundary for this throwaway service.
        image: postgres:16
        env:
          POSTGRES_HOST_AUTH_METHOD: trust
          POSTGRES_DB: adcp_test
        ports:
          # Quoted so a YAML 1.1 parser can't read "5432:5432" as a
          # sexagesimal integer.
          - "5432:5432"
        options: >-
          --health-cmd pg_isready
          --health-interval 5s
          --health-timeout 5s
          --health-retries 10
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies (with [pg] extra)
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev,pg]"
      - name: Run Postgres conformance tests
        env:
          ADCP_PG_TEST_URL: postgresql://postgres@localhost:5432/adcp_test
        run: |
          pytest tests/conformance/signing/test_pg_replay_store.py \
            tests/conformance/signing/test_pg_replay_store_e2e.py \
            tests/conformance/decisioning/test_pg_buyer_agent_registry.py \
            tests/conformance/decisioning/test_pg_idempotency_backend.py \
            -v

  conventional-commits:
    name: Validate conventional commit format
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request'
    steps:
      - uses: actions/checkout@v4
        with:
          # Full history so merge-base against the base branch works.
          fetch-depth: 0
      - name: Validate PR commits
        uses: amannn/action-semantic-pull-request@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Validate individual commits
        run: |
          # Get the base branch
          BASE_SHA=$(git merge-base origin/${{ github.base_ref }} HEAD)
          # Check each commit since the base
          echo "Validating commits since $BASE_SHA..."
          git log --format="%H %s" $BASE_SHA..HEAD | while read sha message; do
            # Skip merge commits (GitHub automatically creates these)
            if echo "$message" | grep -qE '^Merge [0-9a-f]+ into [0-9a-f]+'; then
              echo "⊙ Skipping merge commit: $sha"
              continue
            fi
            # Check if message matches conventional commit format
            if ! echo "$message" | grep -qE '^(feat|fix|docs|style|refactor|perf|test|build|ci|chore|revert)(\([^)]+\))?!?: .+'; then
              echo "❌ Commit $sha does not follow Conventional Commits format:"
              echo " $message"
              echo ""
              echo "Expected format: <type>[optional scope]: <description>"
              echo "Types: feat, fix, docs, style, refactor, perf, test, build, ci, chore, revert"
              echo ""
              echo "Examples:"
              echo " feat: add new feature"
              echo " fix: resolve bug in parser"
              echo " feat(api): add new endpoint"
              echo " feat!: breaking change"
              # Exiting the piped while-loop subshell with 1 makes the
              # pipeline (the step's last command chain) fail under bash -e.
              exit 1
            else
              echo "✓ $sha: $message"
            fi
          done
          echo ""
          echo "✅ All commits follow Conventional Commits format"

  downstream-imports:
    name: Downstream import smoke (representative consumer symbols)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.11
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Build and install wheel
        run: |
          python -m pip install --upgrade pip build
          python -m build --wheel --outdir dist/
          pip install dist/*.whl
      # Proxy for real downstream import sites (salesagent, creative agents,
      # signals agents). Any ImportError here means we broke the public API
      # surface without a migration pointer — failing CI is the goal.
      - name: Import representative public-API symbols
        run: |
          python - <<'PY'
          from adcp import (
              ADCPClient,
              AgentConfig,
              BrandReference,
              CpmPricingOption,
              CreateMediaBuyRequest,
              Error,
              GetProductsRequest,
              ListCreativesRequest,
              MediaBuyStatus,
              Package,
              PackageRequest,
              PublisherPropertiesAll,
              SyncCatalogsRequest,
          )
          from adcp.types import (
              AudioFormatAsset,
              BriefFormatAsset,
              CatalogFormatAsset,
              ContextObject,
              CreativeAsset,
              CssFormatAsset,
              DaastFormatAsset,
              HtmlFormatAsset,
              ImageFormatAsset,
              JavascriptFormatAsset,
              MarkdownFormatAsset,
              RepeatableAssetGroup,
              TargetingOverlay,
              TextFormatAsset,
              UrlFormatAsset,
              VastFormatAsset,
              VideoFormatAsset,
              WebhookFormatAsset,
          )
          # Removed-type shims: old import paths must raise a guided
          # ImportError pointing at the migration guide.
          import adcp
          for name in ("BrandManifest", "FormatCategory", "DeliverTo"):
              try:
                  getattr(adcp, name)
              except ImportError as exc:
                  assert "MIGRATION_v3_to_v4" in str(exc), (
                      f"{name} deprecation shim dropped migration pointer: {exc}"
                  )
              else:
                  raise AssertionError(
                      f"{name} import should raise ImportError with migration pointer"
                  )
          # The deep submodule path (some older import sites reach this far)
          # must also surface the migration pointer, not a bare ModuleNotFoundError.
          try:
              from adcp.types.generated_poc.enums.format_category import FormatCategory  # noqa: F401
          except ImportError as exc:
              assert "MIGRATION_v3_to_v4" in str(exc), exc
          else:
              raise AssertionError(
                  "format_category submodule should raise ImportError with migration pointer"
              )
          assert adcp.__version__ and adcp.__version__ != "3.12.0", (
              f"adcp.__version__={adcp.__version__!r} — expected real pkg metadata"
          )
          assert adcp.get_adcp_version(), "ADCP_VERSION file is empty"
          print(f"OK — adcp=={adcp.__version__}, spec={adcp.get_adcp_version()}")
          PY

  schema-check:
    name: Validate schemas are up-to-date
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      - name: Check ADCP version
        id: version-check
        run: |
          VERSION=$(cat src/adcp/ADCP_VERSION)
          echo "ADCP_VERSION=$VERSION"
          # Skip regeneration + drift check for pre-release tags (alpha/beta/rc)
          # and for `latest`, which is a moving dev snapshot — the committed
          # generated types are frozen against the bundle we last synced, and
          # CI's fresh sync against today's `latest.tgz` is expected to drift.
          if echo "$VERSION" | grep -qE '(alpha|beta|rc)' || [ "$VERSION" = "latest" ]; then
            echo "is_prerelease=true" >> "$GITHUB_OUTPUT"
            echo "Pre-release / latest version detected - will skip schema sync"
          else
            echo "is_prerelease=false" >> "$GITHUB_OUTPUT"
            echo "Stable version - will sync schemas from upstream"
          fi
      # Stable upstream tags ship Sigstore sidecars; sync_schemas.py
      # verifies the bundle via `cosign verify-blob` before extraction.
      # Installs cosign from the official sigstore/cosign-installer action.
      - name: Install cosign (for signature verification)
        if: steps.version-check.outputs.is_prerelease != 'true'
        uses: sigstore/cosign-installer@v3
      - name: Download latest schemas
        if: steps.version-check.outputs.is_prerelease != 'true'
        run: python scripts/sync_schemas.py
      - name: Fix schema references
        if: steps.version-check.outputs.is_prerelease != 'true'
        run: python scripts/fix_schema_refs.py
      - name: Bundle schemas into package
        if: steps.version-check.outputs.is_prerelease != 'true'
        run: python scripts/bundle_schemas.py
      - name: Generate models
        if: steps.version-check.outputs.is_prerelease != 'true'
        run: python scripts/generate_types.py
      - name: Validate generated code syntax
        run: |
          echo "Validating generated code can be parsed..."
          python -m py_compile src/adcp/types/_generated.py
          echo "✓ Syntax validation passed"
      - name: Validate generated code imports
        run: |
          echo "Validating generated code can be imported..."
          python -c "from adcp.types import _generated as generated; print(f'✓ Successfully imported {len(dir(generated))} symbols')"
      - name: Run code generation tests
        run: |
          echo "Running code generation test suite..."
          pytest tests/test_code_generation.py -v --tb=short
      - name: Check for schema drift
        if: steps.version-check.outputs.is_prerelease != 'true'
        run: |
          # datamodel-codegen's numbered-variant class names
          # (Pass1/Pass4, Status16/Status17, StatusFilter1/StatusFilter4,
          # Type80, etc.) shift between regens because the generator
          # walks the schema graph in filesystem-iteration order and
          # APFS (macOS) vs. ext4 (Linux CI) sort differently. The
          # numbers are an implementation detail; semantic aliases in
          # ``src/adcp/types/aliases.py`` pin the names downstream
          # actually uses.
          #
          # The real drift guarantees we need are enforced elsewhere:
          #   * ``tests/test_schemas_version_pin.py`` — ADCP_VERSION
          #     matches ``schemas/cache/index.json.adcp_version`` on
          #     every test run.
          #   * This job's "Validate generated code syntax/imports"
          #     steps above — the regenerated code compiles and imports.
          #   * ``tests/test_asset_aliases_stable.py`` — the semantic
          #     aliases still point at valid classes.
          #
          # We keep this step as a "regen runs without error on stable
          # tags" smoke — but don't fail on line-level diff, because
          # the non-determinism produces false positives that block
          # release PRs for cosmetic churn.
          if git diff --quiet src/adcp/types/_generated.py schemas/cache/; then
            echo "✓ Schemas are up-to-date (no diff)"
          else
            echo "ℹ Regen produced cosmetic diff — see aliases.py for stable names"
            echo " Numbered-variant class-name churn is expected; the semantic"
            echo " alias tests and drift-version-pin test guard the real surface."
          fi

  storyboard:
    name: AdCP storyboard runner — examples/seller_agent.py
    runs-on: ubuntu-latest
    # Non-blocking until seller-agent content gaps in #304 are resolved.
    # Promote to required once overall_status: passing and controller_detected: true.
    continue-on-error: true
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Set up Node 22
        uses: actions/setup-node@v4
        with:
          node-version: "22"
      # Cache the npm tarball + extracted package directory so the
      # storyboard runner install isn't a cold network fetch every run.
      # Key by OS only (not by version) so the cache survives across
      # ``@adcp/sdk`` releases — npm install reuses tarballs that are
      # already in the cache and only fetches the delta. ``@latest`` is
      # intentional for drift detection (see "Run storyboard suite"
      # below); the cache amortizes the 5-15 s of fetch+extract that
      # would otherwise repeat on every CI run.
      - name: Cache ~/.npm
        uses: actions/cache@v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-npm-adcp-sdk
          restore-keys: |
            ${{ runner.os }}-npm-
      - name: Pre-install @adcp/sdk (once, then call binary directly)
        # Single install step at the top of the job; subsequent runner
        # calls invoke the already-installed binary instead of paying
        # the ``npx -y -p ...`` per-invocation extract+link tax.
        # ``@adcp/sdk@latest`` is intentionally unpinned: this is AdCP's
        # own CI running AdCP's own canonical runner — tracking latest
        # surfaces protocol drift as soon as it ships, which is the
        # point of this job.
        run: |
          npm install -g @adcp/sdk@latest
          adcp --version
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      - name: Start seller agent
        run: |
          ADCP_PORT=3001 python examples/seller_agent.py &
          AGENT_PID=$!
          for i in $(seq 1 60); do
            # Any HTTP response (including 405 on GET to a POST-only endpoint)
            # means the server is up and accepting connections.
            # ``||`` runs on the assignment so curl's "000" stdout and the
            # fallback don't concatenate when the connection is refused.
            HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 1 \
              http://127.0.0.1:3001/mcp 2>/dev/null) || HTTP_CODE="000"
            if [ "$HTTP_CODE" != "000" ]; then
              echo "Seller agent ready (HTTP ${HTTP_CODE}, pid ${AGENT_PID})"
              break
            fi
            if ! kill -0 "$AGENT_PID" 2>/dev/null; then
              echo "Seller agent process died during startup"
              exit 1
            fi
            if [ "$i" -eq 60 ]; then
              echo "Seller agent failed to start within 30s"
              kill "$AGENT_PID" 2>/dev/null || true
              exit 1
            fi
            sleep 0.5
          done
      - name: Run storyboard suite
        timeout-minutes: 5
        # ``adcp`` was installed once at job start (see "Pre-install"
        # step) — call the binary directly to skip per-invocation
        # ``npx`` extract+link overhead.
        run: |
          adcp storyboard run \
            http://127.0.0.1:3001/mcp media_buy_seller \
            --json --allow-http \
            > storyboard-result.json
      - name: Assert pass
        run: |
          python -c "
          import json, sys, pathlib
          p = pathlib.Path('storyboard-result.json')
          if not p.exists() or p.stat().st_size == 0:
              print('storyboard-result.json missing or empty — runner produced no output')
              sys.exit(1)
          with p.open() as f:
              d = json.load(f)
          if d.get('overall_status') != 'passing':
              print(json.dumps(d, indent=2))
              sys.exit(1)
          if not d.get('controller_detected'):
              print('controller_detected was false; check DemoStore overrides (see #304)')
              sys.exit(1)
          "
      - if: always()
        uses: actions/upload-artifact@v4
        with:
          name: storyboard-result-${{ github.run_attempt }}
          path: storyboard-result.json
          if-no-files-found: warn

  v3-reference-seller-tests:
    name: v3 reference seller — pytest (respx-mocked upstream)
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
          # Example-local deps: the v3 reference seller imports
          # sqlalchemy + asyncpg + httpx-respx but those aren't in the
          # SDK's [dev] extras. Install them inline rather than adding
          # a separate optional-dependencies group for a single example.
          pip install "sqlalchemy>=2.0" "asyncpg>=0.29" "respx>=0.20"
      - name: Run translator-pattern tests
        # The tests respx-mock the JS mock-server upstream so we don't
        # need to boot Node here. Storyboard CI (below) covers the
        # real boot-the-upstream path.
        run: |
          pytest examples/v3_reference_seller/tests/ -v

  storyboard-v3-reference-seller:
    name: AdCP storyboard runner — v3 reference seller (translator)
    runs-on: ubuntu-latest
    # Required as of @adcp/sdk@6.7.0 (sales-guaranteed mock-server
    # canonicalized; closes #449). Storyboard run + traffic-counter
    # assertions gate every PR's translator-pattern conformance.
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_HOST_AUTH_METHOD: trust
          POSTGRES_DB: adcp
        ports:
          # Quoted so a YAML 1.1 parser can't read "5432:5432" as a
          # sexagesimal integer.
          - "5432:5432"
        options: >-
          --health-cmd pg_isready
          --health-interval 5s
          --health-timeout 5s
          --health-retries 10
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Set up Node 22
        uses: actions/setup-node@v4
        with:
          node-version: "22"
      # Cache ~/.npm by OS only so subsequent runs hit the tarball
      # cache; npm install reuses what's there and only fetches the
      # delta on a new ``@latest`` release. See the storyboard job
      # above for the same pattern + rationale.
      - name: Cache ~/.npm
        uses: actions/cache@v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-npm-adcp-sdk
          restore-keys: |
            ${{ runner.os }}-npm-
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev,pg]"
          # Example-local deps: the v3 reference seller imports
          # sqlalchemy + asyncpg (async Postgres driver) which aren't
          # in the SDK's [dev,pg] extras. Install inline rather than
          # adding an example-only optional-dependencies group.
          pip install "sqlalchemy>=2.0" "asyncpg>=0.29" "respx>=0.20"
      - name: Pre-install @adcp/sdk (once, then call binary directly)
        run: |
          npm install -g @adcp/sdk@latest
          adcp --version
      - name: Start JS mock-server upstream
        run: |
          # Cached install above means this is a hot-start (~2-3s on
          # GHA-hosted runners) — no npm install delay. Tee output to
          # /tmp/mock-server.log so failures surface their stack traces
          # (otherwise the readiness loop just times out silently).
          adcp mock-server sales-guaranteed --port 4503 --api-key test-key \
            > /tmp/mock-server.log 2>&1 &
          MOCK_PID=$!
          echo "MOCK_PID=$MOCK_PID" >> "$GITHUB_ENV"
          # Health-check via /_debug/traffic — non-network-scoped and
          # no-auth, so it doesn't break when the JS mock's seed-data
          # renames or removes a specific network. The endpoint is
          # always present on the harness-side mock.
          for i in $(seq 1 120); do
            # ``||`` runs on the assignment so curl's "000" stdout and the
            # fallback don't concatenate when the connection is refused.
            HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 1 \
              http://127.0.0.1:4503/_debug/traffic 2>/dev/null) || HTTP_CODE="000"
            if [ "$HTTP_CODE" = "200" ]; then
              echo "Upstream mock ready (HTTP 200, pid $MOCK_PID, $i polls)"
              break
            fi
            if [ "$i" -eq 120 ]; then
              echo "Upstream mock failed to start within 60s"
              echo "--- mock-server.log ---"
              cat /tmp/mock-server.log || echo "(log unavailable)"
              kill "$MOCK_PID" 2>/dev/null || true
              exit 1
            fi
            sleep 0.5
          done
      - name: Seed Postgres fixtures
        env:
          DATABASE_URL: postgresql+asyncpg://postgres@127.0.0.1:5432/adcp
        run: |
          cd examples/v3_reference_seller
          python -m seed
      - name: Boot v3 reference seller (translator)
        env:
          DATABASE_URL: postgresql+asyncpg://postgres@127.0.0.1:5432/adcp
          MOCK_AD_SERVER_URL: http://127.0.0.1:4503
          MOCK_AD_SERVER_API_KEY: test-key
          PORT: "3001"
        run: |
          cd examples/v3_reference_seller
          python -m src.app &
          SELLER_PID=$!
          echo "SELLER_PID=$SELLER_PID" >> "$GITHUB_ENV"
          for i in $(seq 1 60); do
            # ``||`` runs on the assignment so curl's "000" stdout and the
            # fallback don't concatenate when the connection is refused.
            HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 1 \
              http://127.0.0.1:3001/mcp 2>/dev/null) || HTTP_CODE="000"
            if [ "$HTTP_CODE" != "000" ]; then
              echo "Seller ready (HTTP ${HTTP_CODE}, pid ${SELLER_PID})"
              break
            fi
            if [ "$i" -eq 60 ]; then
              echo "Seller failed to start within 30s"
              kill "$SELLER_PID" 2>/dev/null || true
              exit 1
            fi
            sleep 0.5
          done
          # Upstream-still-alive probe — guard against the upstream
          # dying during seller startup (e.g. seller's connection
          # handshake crashes the mock). If the upstream is gone,
          # the storyboard run will fail in confusing ways; fail
          # here with a clear diagnostic instead.
          UPSTREAM_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 2 \
            -H "Authorization: Bearer test-key" \
            -H "X-Network-Code: net_premium_us" \
            http://127.0.0.1:4503/v1/products 2>/dev/null || echo "000")
          if [ "$UPSTREAM_CODE" != "200" ]; then
            echo "Upstream mock no longer responding after seller boot (HTTP ${UPSTREAM_CODE})"
            echo "The seller likely crashed the upstream during connection handshake."
            kill "$SELLER_PID" 2>/dev/null || true
            kill "$MOCK_PID" 2>/dev/null || true
            exit 1
          fi
          echo "Upstream still alive after seller boot (HTTP ${UPSTREAM_CODE})"
      - name: Run storyboard suite
        timeout-minutes: 5
        run: |
          # /etc/hosts override so the buyer can reach acme.localhost
          # (the seeded tenant subdomain).
          echo "127.0.0.1 acme.localhost" | sudo tee -a /etc/hosts
          # ``adcp`` was installed once at job start — call the binary
          # directly to skip per-invocation ``npx`` extract+link.
          # ``|| true`` so the JSON is still printed + uploaded on a
          # failing run; the "Assert pass" step below is the gate.
          adcp storyboard run \
            http://acme.localhost:3001/mcp media_buy_seller \
            --json --allow-http \
            > v3-storyboard-result.json || true
          cat v3-storyboard-result.json | head -50
      # BUGFIX: this job is documented above as a blocking gate, but the
      # run step swallowed the runner's exit code (``|| true``) and no
      # step asserted on the result — the job could never fail on a
      # failing storyboard. Mirror the other storyboard jobs' assert.
      - name: Assert pass
        run: |
          python -c "
          import json, sys, pathlib
          p = pathlib.Path('v3-storyboard-result.json')
          if not p.exists() or p.stat().st_size == 0:
              print('v3-storyboard-result.json missing or empty — runner produced no output')
              sys.exit(1)
          with p.open() as f:
              d = json.load(f)
          if d.get('overall_status') != 'passing':
              print(json.dumps(d, indent=2))
              sys.exit(1)
          "
      - if: always()
        uses: actions/upload-artifact@v4
        with:
          name: v3-storyboard-result-${{ github.run_attempt }}
          # BUGFIX: the result file is written at the workspace root (the
          # "Run storyboard suite" step never cd's into the example dir),
          # so the old path examples/v3_reference_seller/… never matched
          # and the artifact was always empty (masked by
          # if-no-files-found: warn).
          path: v3-storyboard-result.json
          if-no-files-found: warn

  storyboard-multi-platform-seller:
    name: AdCP storyboard runner — examples/multi_platform_seller (PlatformRouter)
    runs-on: ubuntu-latest
    # Multi-tenant proof: one process, two tenants, one router. Each
    # tenant's storyboard runs against its own subdomain
    # (tenant-a.localhost / tenant-b.localhost). Blocking gate — both
    # tenants must pass the AdCP storyboard suite for the example to
    # remain a credible reference. continue-on-error was dropped after
    # the mocks were aligned with the wire contract; future drift now
    # blocks CI.
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Set up Node 22
        uses: actions/setup-node@v4
        with:
          node-version: "22"
      - name: Cache ~/.npm
        uses: actions/cache@v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-npm-adcp-sdk
          restore-keys: |
            ${{ runner.os }}-npm-
      - name: Pre-install @adcp/sdk
        run: |
          npm install -g @adcp/sdk@latest
          adcp --version
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      - name: Map tenant subdomains to localhost
        run: |
          # The storyboard runner connects to ``tenant-x.localhost`` —
          # /etc/hosts gives those names a 127.0.0.1 mapping so the
          # subdomain middleware on the seller process resolves the
          # right tenant from the Host header.
          echo "127.0.0.1 tenant-a.localhost tenant-b.localhost" \
            | sudo tee -a /etc/hosts
      - name: Boot multi-platform seller
        run: |
          ADCP_PORT=3001 python -m examples.multi_platform_seller.src.app &
          SELLER_PID=$!
          echo "SELLER_PID=$SELLER_PID" >> "$GITHUB_ENV"
          for i in $(seq 1 60); do
            HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 1 \
              http://127.0.0.1:3001/mcp 2>/dev/null) || HTTP_CODE="000"
            if [ "$HTTP_CODE" != "000" ]; then
              echo "Seller ready (HTTP ${HTTP_CODE}, pid ${SELLER_PID})"
              break
            fi
            if ! kill -0 "$SELLER_PID" 2>/dev/null; then
              echo "Seller process died during startup"
              exit 1
            fi
            if [ "$i" -eq 60 ]; then
              echo "Seller failed to start within 30s"
              kill "$SELLER_PID" 2>/dev/null || true
              exit 1
            fi
            sleep 0.5
          done
      - name: Run storyboard — tenant-a (sales-guaranteed)
        timeout-minutes: 5
        run: |
          adcp storyboard run \
            http://tenant-a.localhost:3001/mcp media_buy_seller \
            --json --allow-http \
            > tenant-a-storyboard.json
          cat tenant-a-storyboard.json | head -50
      - name: Run storyboard — tenant-b (sales-non-guaranteed)
        timeout-minutes: 5
        run: |
          adcp storyboard run \
            http://tenant-b.localhost:3001/mcp media_buy_seller \
            --json --allow-http \
            > tenant-b-storyboard.json
          cat tenant-b-storyboard.json | head -50
      - if: always()
        uses: actions/upload-artifact@v4
        with:
          name: multi-platform-storyboards-${{ github.run_attempt }}
          path: |
            tenant-a-storyboard.json
            tenant-b-storyboard.json
          if-no-files-found: warn

  storyboard-sales-proposal-mode:
    name: AdCP storyboard runner — sales-proposal-mode (proposal_finalize)
    runs-on: ubuntu-latest
    # v1.5 ProposalManager finalize lifecycle proof. The mock seller
    # declares ``finalize=True`` + wires an ``InMemoryProposalStore``;
    # the framework's dispatch wiring intercepts ``refine[i].action='finalize'``
    # requests, runs ``finalize_proposal``, commits via the store, and
    # auto-hydrates ``ctx.recipes`` on subsequent ``create_media_buy``
    # calls. This job is the storyboard-level proof that the design
    # works end-to-end. Blocking gate — no continue-on-error.
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Set up Node 22
        uses: actions/setup-node@v4
        with:
          node-version: "22"
      - name: Cache ~/.npm
        uses: actions/cache@v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-npm-adcp-sdk
          restore-keys: |
            ${{ runner.os }}-npm-
      - name: Pre-install @adcp/sdk
        run: |
          npm install -g @adcp/sdk@latest
          adcp --version
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e ".[dev]"
      - name: Boot sales-proposal-mode seller
        run: |
          ADCP_PORT=3003 python -m examples.sales_proposal_mode_seller.src.app &
          SELLER_PID=$!
          echo "SELLER_PID=$SELLER_PID" >> "$GITHUB_ENV"
          for i in $(seq 1 60); do
            HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" --max-time 1 \
              http://127.0.0.1:3003/mcp 2>/dev/null) || HTTP_CODE="000"
            if [ "$HTTP_CODE" != "000" ]; then
              echo "Seller ready (HTTP ${HTTP_CODE}, pid ${SELLER_PID})"
              break
            fi
            if ! kill -0 "$SELLER_PID" 2>/dev/null; then
              echo "Seller process died during startup"
              exit 1
            fi
            if [ "$i" -eq 60 ]; then
              echo "Seller failed to start within 30s"
              kill "$SELLER_PID" 2>/dev/null || true
              exit 1
            fi
            sleep 0.5
          done
      - name: Run storyboard — proposal_finalize
        timeout-minutes: 5
        # Full proposal_finalize storyboard. With @adcp/sdk@6.10.0 the
        # runner's stateful-chain controller exercises every phase
        # (setup, brief, refine, finalize, accept) end-to-end against
        # the framework's intercept seam. Earlier SDK versions skipped
        # refine/finalize/accept due to a sync_accounts cascade gap
        # that 6.10.0 closed (adcp#4053, adcp-client#1146/#1545).
        run: |
          adcp storyboard run \
            http://127.0.0.1:3003/mcp media_buy_seller/proposal_finalize \
            --json --allow-http \
            > proposal-finalize-storyboard.json
          cat proposal-finalize-storyboard.json | head -200
      - name: Assert v1.5 dispatch path scenarios pass
        run: |
          python -c "
          import json, sys, pathlib
          p = pathlib.Path('proposal-finalize-storyboard.json')
          if not p.exists() or p.stat().st_size == 0:
              print('storyboard result missing or empty')
              sys.exit(1)
          with p.open() as f:
              d = json.load(f)
          # Every phase of the proposal_finalize storyboard must pass:
          # setup → brief_with_proposals → refine_proposal →
          # finalize_proposal → accept_proposal. Each exercises a
          # different framework seam — see the per-scenario hints
          # below for what's wired where.
          required_passing = {
              'media_buy_seller/proposal_finalize/setup',
              'media_buy_seller/proposal_finalize/brief_with_proposals',
              'media_buy_seller/proposal_finalize/refine_proposal',
              'media_buy_seller/proposal_finalize/finalize_proposal',
              'media_buy_seller/proposal_finalize/accept_proposal',
          }
          passed = set()
          for track in d.get('tracks', []) or []:
              for s in track.get('scenarios', []) or []:
                  if s.get('overall_passed'):
                      passed.add(s.get('scenario'))
          missing = required_passing - passed
          if missing:
              # Per-scenario likely-cause hints. A contributor breaks the
              # framework finalize wiring; CI tells them what to look at
              # rather than just naming a scenario.
              hints = {
                  'media_buy_seller/proposal_finalize/setup': (
                      'sync_accounts dispatch failed (or the runner stopped '
                      'applying the sole-stateful-step exemption). See '
                      'docs/proposals/proposal-manager-v15-design.md § D5.'
                  ),
                  'media_buy_seller/proposal_finalize/brief_with_proposals': (
                      'Manager.get_products + framework draft persistence '
                      'broke. Check maybe_persist_draft_after_get_products '
                      'in src/adcp/decisioning/proposal_dispatch.py. See '
                      'docs/proposals/proposal-manager-v15-design.md § D1.'
                  ),
                  'media_buy_seller/proposal_finalize/refine_proposal': (
                      'Refine iteration broke. Check refine_products on '
                      'examples/sales_proposal_mode_seller/src/proposal_manager.py '
                      'and the maybe_persist_draft_after_get_products '
                      'overwrite path. See § D1 (refine iteration).'
                  ),
                  'media_buy_seller/proposal_finalize/finalize_proposal': (
                      'Finalize interception broke. Check '
                      'maybe_intercept_finalize in proposal_dispatch.py + '
                      'manager.finalize_proposal in the example. The seam '
                      'must commit via store.commit before projecting. See § D2.'
                  ),
                  'media_buy_seller/proposal_finalize/accept_proposal': (
                      'create_media_buy(proposal_id=...) or '
                      'create_media_buy(packages=[...]) broke. Check '
                      'maybe_hydrate_recipes_for_create_media_buy + '
                      'mark_proposal_consumed in proposal_dispatch.py and '
                      'the platform.create_media_buy adapter. See § D3 + D7.'
                  ),
              }
              print('FAIL: required scenarios did not pass:')
              for s in sorted(missing):
                  hint = hints.get(s, 'no hint registered for this scenario')
                  print(f' - {s}')
                  print(f' likely cause: {hint}')
              print()
              print('--- raw storyboard result ---')
              print(json.dumps(d, indent=2))
              sys.exit(1)
          print('PASS: full proposal_finalize storyboard chain (5/5 phases)')
          "
      - if: always()
        uses: actions/upload-artifact@v4
        with:
          name: sales-proposal-mode-storyboard-${{ github.run_attempt }}
          path: proposal-finalize-storyboard.json
          if-no-files-found: warn