From e7d3da7e2a31f02a0bc83e84629e74f06bf9794a Mon Sep 17 00:00:00 2001 From: actae0n <19864268+xpcmdshell@users.noreply.github.com> Date: Fri, 2 Jan 2026 10:59:28 -0800 Subject: [PATCH 1/3] docs: recommend SubprocessExecutor as default, fix internal API usage - Reorder executors.md to recommend SubprocessExecutor first with decision guide - Update all examples from InProcessExecutor to SubprocessExecutor - Replace internal storage.get_*() calls with Session facade APIs - Fix outdated tools.call() syntax in minimal example - Add warnings to InProcessExecutor section about risks - Add branch policy to AGENTS.md (main is protected) Files updated: - README.md, docs/getting-started.md, docs/executors.md - docs/dependencies.md, docs/production.md, docs/skills.md - docs/storage.md, docs/ARCHITECTURE.md - examples/minimal/README.md, AGENTS.md --- AGENTS.md | 14 +++ README.md | 9 +- docs/ARCHITECTURE.md | 31 +++-- docs/dependencies.md | 41 ++++--- docs/executors.md | 238 ++++++++++++++++++++++--------------- docs/getting-started.md | 8 +- docs/production.md | 17 +-- docs/skills.md | 18 ++- docs/storage.md | 48 +++----- examples/minimal/README.md | 29 +++-- 10 files changed, 256 insertions(+), 197 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index df7a4af..fb761f2 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,5 +1,19 @@ # Agent Instructions +## Branch Policy + +**The `main` branch is protected.** You cannot push directly to main. + +**ALWAYS create a feature branch before making changes:** +```bash +git checkout -b feature/description-of-work +# ... make changes ... +git push -u origin feature/description-of-work +# Then create a PR +``` + +--- + This project uses **bd** (beads) for issue tracking. Run `bd onboard` to get started. ## Quick Reference diff --git a/README.md b/README.md index 70911f6..3e200cc 100644 --- a/README.md +++ b/README.md @@ -28,10 +28,15 @@ Over time, agents build a library of reliable capabilities. 
Simple skills become ```python from pathlib import Path from py_code_mode import Session, FileStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig storage = FileStorage(base_path=Path("./data")) -async with Session(storage=storage) as session: +# SubprocessExecutor provides process isolation (recommended) +config = SubprocessConfig(tools_path=Path("./tools")) +executor = SubprocessExecutor(config=config) + +async with Session(storage=storage, executor=executor) as session: # Agent writes code with tools, skills, and artifacts available result = await session.run(''' # Search for existing skills @@ -111,7 +116,7 @@ For MCP server installation, see [Getting Started](./docs/getting-started.md). - **[Skills](./docs/skills.md)** - Creating, composing, and managing workflows - **[Artifacts](./docs/artifacts.md)** - Persistent data storage patterns - **[Dependencies](./docs/dependencies.md)** - Managing Python packages -- **[Executors](./docs/executors.md)** - InProcess, Subprocess, Container execution +- **[Executors](./docs/executors.md)** - Subprocess, Container, InProcess execution - **[Storage](./docs/storage.md)** - File vs Redis storage backends - **[Production](./docs/production.md)** - Deployment and scaling patterns - **[Architecture](./docs/ARCHITECTURE.md)** - System design and separation of concerns diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md index e3c98df..3c636e3 100644 --- a/docs/ARCHITECTURE.md +++ b/docs/ARCHITECTURE.md @@ -2,6 +2,23 @@ This document explains how tools, skills, and artifacts interact across different deployment scenarios. 
+## Quick Reference + +**Recommended executor for most users:** SubprocessExecutor +- Process isolation, crash recovery, no Docker required +- Used by the MCP server + +**Storage decision:** +- Local development: FileStorage +- Distributed/production: RedisStorage + +**Executor decision:** +- Default/Development: SubprocessExecutor (recommended) +- Untrusted code/Production: ContainerExecutor +- Trusted code + max speed: InProcessExecutor + +--- + ## Core Concepts | Component | Purpose | Format | @@ -669,9 +686,9 @@ Choose storage backend (for skills and artifacts): Choose executor (with tools_path): | - +-- Same-process execution? -> InProcessExecutor(config=InProcessConfig(tools_path=...)) + +-- Default (recommended) -> SubprocessExecutor(config=SubprocessConfig(tools_path=...)) +-- Docker isolation? -> ContainerExecutor(config=ContainerConfig(tools_path=...)) - +-- Subprocess isolation? -> SubprocessExecutor(config=SubprocessConfig(tools_path=...)) + +-- Trusted code + max speed? -> InProcessExecutor(config=InProcessConfig(tools_path=...)) Combine: Session(storage=storage, executor=executor) @@ -1036,12 +1053,12 @@ recipes: # Named presets ## Deployment Checklist -### Local Development (Session + FileStorage + InProcessExecutor) +### Local Development (Session + FileStorage + SubprocessExecutor) - [ ] Create base storage directory for skills and artifacts - [ ] Add YAML tool definitions to separate tools directory - [ ] Add Python skill files to `/skills/` -- [ ] Configure executor: `InProcessConfig(tools_path=Path("./tools"))` -- [ ] Use `Session(storage=FileStorage(base_path=...), executor=InProcessExecutor(config))` +- [ ] Configure executor: `SubprocessConfig(tools_path=Path("./tools"))` +- [ ] Use `Session(storage=FileStorage(base_path=...), executor=SubprocessExecutor(config))` ### Local with Container Isolation (Container + File) - [ ] Build Docker image with py-code-mode installed @@ -1050,12 +1067,12 @@ recipes: # Named presets - [ ] Set 
`auth_disabled=True` for local development - [ ] Use `Session(storage=FileStorage(...), executor=ContainerExecutor(config))` -### Production (Session + RedisStorage) +### Production (Session + RedisStorage + SubprocessExecutor) - [ ] Provision Redis instance - [ ] Bootstrap skills: `python -m py_code_mode.store bootstrap --target redis://... --prefix myapp:skills` - [ ] Tools stay on filesystem (via executor config) - [ ] Create storage: `RedisStorage(url="redis://...", prefix="myapp")` -- [ ] Configure executor: `InProcessConfig(tools_path=Path("./tools"))` +- [ ] Configure executor: `SubprocessConfig(tools_path=Path("./tools"))` - [ ] Use `Session(storage=storage, executor=executor)` ### Production with Container Isolation diff --git a/docs/dependencies.md b/docs/dependencies.md index 9161131..d56d8e3 100644 --- a/docs/dependencies.md +++ b/docs/dependencies.md @@ -20,22 +20,24 @@ deps.sync() ## Pre-configuring Dependencies -Configure dependencies before session creation for predictable environments: +Configure dependencies via executor config for predictable environments: ```python from pathlib import Path from py_code_mode import Session, FileStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig storage = FileStorage(base_path=Path("./data")) -# Pre-configure before session -deps_store = storage.get_deps_store() -deps_store.add("pandas>=2.0") -deps_store.add("numpy") -deps_store.add("requests") +# Pre-configure deps in executor config +config = SubprocessConfig( + tools_path=Path("./tools"), + deps=["pandas>=2.0", "numpy", "requests"], # Pre-configured dependencies +) +executor = SubprocessExecutor(config=config) -# Auto-sync on session start -async with Session(storage=storage, sync_deps_on_start=True) as session: +# Auto-sync on session start installs pre-configured deps +async with Session(storage=storage, executor=executor, sync_deps_on_start=True) as session: # All pre-configured packages are installed result = await 
session.run("import pandas; print(pandas.__version__)") ``` @@ -45,11 +47,11 @@ async with Session(storage=storage, sync_deps_on_start=True) as session: For security-sensitive environments, disable runtime package installation: ```python -from py_code_mode.execution import InProcessExecutor, InProcessConfig +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig # Lock down deps - no runtime installation allowed -config = InProcessConfig(allow_runtime_deps=False) -executor = InProcessExecutor(config=config) +config = SubprocessConfig(allow_runtime_deps=False) +executor = SubprocessExecutor(config=config) async with Session(storage=storage, executor=executor) as session: # deps.add() and deps.remove() will raise RuntimeDepsDisabledError @@ -59,7 +61,7 @@ async with Session(storage=storage, executor=executor) as session: ``` This pattern allows you to: -1. Pre-configure allowed dependencies via storage +1. Pre-configure allowed dependencies via executor config (`deps=[...]`) 2. Start session with `sync_deps_on_start=True` to install them 3. 
Lock down runtime modifications to prevent agent from installing arbitrary packages @@ -128,16 +130,17 @@ This prevents agents from: **Pre-configure for production:** ```python -# Production: pre-configure deps, disable runtime changes -deps_store.add("pandas>=2.0") -deps_store.add("numpy") -config = ContainerConfig(allow_runtime_deps=False) +# Production: pre-configure deps via config, disable runtime changes +config = ContainerConfig( + deps=["pandas>=2.0", "numpy"], + allow_runtime_deps=False +) ``` **Allow runtime for development:** ```python # Development: let agent install as needed -config = InProcessConfig(allow_runtime_deps=True) +config = SubprocessConfig(allow_runtime_deps=True) ``` **Version pinning:** @@ -183,5 +186,5 @@ Different executors handle dependencies differently: - Check network connectivity (for downloading packages) **Runtime deps disabled errors:** -- Pre-configure dependencies via `storage.get_deps_store()` -- Or enable runtime deps in executor config +- Pre-configure dependencies via executor config: `config = SubprocessConfig(deps=["package"])` +- Or enable runtime deps: `config = SubprocessConfig(allow_runtime_deps=True)` diff --git a/docs/executors.md b/docs/executors.md index f3752fd..f491439 100644 --- a/docs/executors.md +++ b/docs/executors.md @@ -1,70 +1,53 @@ # Executors -Executors determine where and how agent code runs. Three backends available: InProcess, Subprocess, and Container. +Executors determine where and how agent code runs. Three backends are available: Subprocess, Container, and InProcess. -## InProcessExecutor (Default) +## Quick Decision Guide -Code runs in the same Python process as your application. Fastest option, no isolation. 
- -```python -from py_code_mode import Session, FileStorage -from pathlib import Path - -storage = FileStorage(base_path=Path("./data")) - -# InProcessExecutor is the default -async with Session(storage=storage) as session: - result = await session.run(agent_code) ``` +Which executor should I use? -### Configuration +Start here: SubprocessExecutor (recommended default) + - Process isolation, crash recovery, clean environments + - No Docker required + - Used by the MCP server -```python -from pathlib import Path -from py_code_mode.execution import InProcessExecutor, InProcessConfig +Need stronger isolation? → ContainerExecutor + - Untrusted code, production, multi-tenant + - Filesystem and network isolation + - Requires Docker -config = InProcessConfig( - tools_path=Path("./tools"), # Path to YAML tool definitions - deps=["pandas>=2.0", "numpy"], # Pre-configured dependencies - default_timeout=30.0, # Default execution timeout in seconds - allow_runtime_deps=True # Allow agents to install packages at runtime -) - -executor = InProcessExecutor(config=config) - -async with Session(storage=storage, executor=executor) as session: - result = await session.run(agent_code) +Need maximum speed AND trust the code completely? 
→ InProcessExecutor + - No isolation (runs in your process) + - Only for trusted code you control ``` -### When to Use - -- ✓ Development and prototyping -- ✓ Trusted agent code -- ✓ Performance-critical applications -- ✓ Simple deployment requirements - -### When NOT to Use - -- ✗ Untrusted agent code -- ✗ Need process isolation -- ✗ Multi-tenant environments -- ✗ Resource limiting requirements +| Requirement | Subprocess | Container | InProcess | +|-------------|------------|-----------|-----------| +| **Recommended for most users** | **Yes** | | | +| Process isolation | Yes | Yes | No | +| Crash recovery | Yes | Yes | No | +| Container isolation | No | Yes | No | +| No Docker required | Yes | No | Yes | +| Resource limits | Partial | Full | No | +| Untrusted code | No | Yes | No | --- -## SubprocessExecutor +## SubprocessExecutor (Recommended) -Code runs in a Jupyter kernel subprocess. Process-level isolation without Docker overhead. +Code runs in a Jupyter kernel subprocess. Process-level isolation without Docker overhead. 
**This is the recommended starting point for most users.** ```python from pathlib import Path +from py_code_mode import Session, FileStorage from py_code_mode.execution import SubprocessExecutor, SubprocessConfig +storage = FileStorage(base_path=Path("./data")) + config = SubprocessConfig( tools_path=Path("./tools"), # Path to YAML tool definitions - python_version="3.11", # Python version for the subprocess - default_timeout=120.0, # Execution timeout - allow_runtime_deps=False # Lock down dependency installation + default_timeout=120.0, # Execution timeout ) executor = SubprocessExecutor(config) @@ -73,12 +56,13 @@ async with Session(storage=storage, executor=executor) as session: result = await session.run(agent_code) ``` -### Features +### Why SubprocessExecutor is the Default Choice -- **Process isolation** - Agent code runs in separate process -- **Clean environment** - Fresh venv created for each executor -- **Crash recovery** - Main process unaffected by agent crashes -- **Resource separation** - Subprocess can be monitored/limited separately +- **Crash recovery** - If agent code crashes, your main process continues running +- **Clean environment** - Fresh virtual environment for predictable behavior +- **Process isolation** - Agent code can't interfere with your application state +- **No Docker required** - Works everywhere Python runs +- **Production-ready** - Used by the MCP server for Claude Code integration ### Configuration Options @@ -95,10 +79,10 @@ SubprocessConfig( ### When to Use -- ✓ Need isolation without Docker complexity -- ✓ Development on systems without Docker -- ✓ Moderate trust in agent code -- ✓ Want crash recovery without containers +- **Development and prototyping** - Isolated environment prevents accidents +- **MCP server deployments** - Default for Claude Code integration +- **CI/CD pipelines** - No Docker dependency +- **Any situation where you want safety without complexity** ### Limitations @@ -111,18 +95,22 @@ SubprocessConfig( 
## ContainerExecutor -Code runs in a Docker container. Full isolation for untrusted code. +Code runs in a Docker container. Full isolation for untrusted code and production deployments. ```python from pathlib import Path +import os +from py_code_mode import Session, FileStorage from py_code_mode.execution import ContainerExecutor, ContainerConfig +storage = FileStorage(base_path=Path("./data")) + config = ContainerConfig( tools_path=Path("./tools"), # Path to YAML tool definitions (mounted into container) - deps=["requests"], # Pre-configured dependencies - timeout=60.0, # Execution timeout - allow_runtime_deps=False, # Lock down deps for security - auth_token="your-secret-token", # API authentication (required for production) + deps=["requests"], # Pre-configured dependencies + timeout=60.0, # Execution timeout + allow_runtime_deps=False, # Lock down deps for security + auth_token=os.getenv("CONTAINER_AUTH_TOKEN"), # Required for production ) executor = ContainerExecutor(config) @@ -135,6 +123,7 @@ For local development, you can disable auth: ```python config = ContainerConfig( + tools_path=Path("./tools"), auth_disabled=True, # Only for local development! 
) ``` @@ -151,14 +140,14 @@ config = ContainerConfig( ```python ContainerConfig( tools_path=Path("./tools"), # Path to YAML tool definitions (mounted) - deps=["requests"], # Pre-configured dependencies - timeout=60.0, # Execution timeout - allow_runtime_deps=False, # Lock down package installation - auth_token="secret", # Bearer token for API auth (production) - auth_disabled=False, # Set True for local dev only (no auth) - network_disabled=False, # Disable container network access - memory_limit="512m", # Container memory limit - cpu_quota=None # CPU quota (default: no limit) + deps=["requests"], # Pre-configured dependencies + timeout=60.0, # Execution timeout + allow_runtime_deps=False, # Lock down package installation + auth_token="secret", # Bearer token for API auth (production) + auth_disabled=False, # Set True for local dev only (no auth) + network_disabled=False, # Disable container network access + memory_limit="512m", # Container memory limit + cpu_quota=None # CPU quota (default: no limit) ) ``` @@ -188,11 +177,10 @@ docker build -t py-code-mode:tools -f docker/Dockerfile.tools . ### When to Use -- ✓ Untrusted agent code -- ✓ Production deployments -- ✓ Multi-tenant environments -- ✓ Need resource isolation -- ✓ Compliance/security requirements +- **Untrusted agent code** - Users you don't control +- **Production deployments** - Maximum security +- **Multi-tenant environments** - Tenant isolation +- **Compliance requirements** - Audit-friendly isolation ### Limitations @@ -203,44 +191,100 @@ docker build -t py-code-mode:tools -f docker/Dockerfile.tools . --- -## Choosing an Executor +## InProcessExecutor + +Code runs in the same Python process as your application. Fastest option, but provides **no isolation**. + +> **Warning:** InProcessExecutor runs agent code directly in your process. A crash in agent code crashes your application. Only use this when you fully trust the code and need maximum performance. 
+ +```python +from pathlib import Path +from py_code_mode import Session, FileStorage +from py_code_mode.execution import InProcessExecutor, InProcessConfig + +storage = FileStorage(base_path=Path("./data")) + +config = InProcessConfig( + tools_path=Path("./tools"), # Path to YAML tool definitions + deps=["pandas>=2.0", "numpy"], # Pre-configured dependencies + default_timeout=30.0, # Default execution timeout in seconds + allow_runtime_deps=True # Allow agents to install packages at runtime +) + +executor = InProcessExecutor(config) + +async with Session(storage=storage, executor=executor) as session: + result = await session.run(agent_code) +``` + +### Configuration Options + +```python +InProcessConfig( + tools_path=Path("./tools"), # Path to YAML tool definitions + deps=["pandas>=2.0", "numpy"], # Pre-configured dependencies + default_timeout=30.0, # Default execution timeout in seconds + allow_runtime_deps=True # Allow agents to install packages at runtime +) +``` + +### When to Use -| Requirement | InProcess | Subprocess | Container | -|-------------|-----------|------------|-----------| -| Fastest execution | ✓ | | | -| Process isolation | | ✓ | ✓ | -| Container isolation | | | ✓ | -| No Docker required | ✓ | ✓ | | -| Crash recovery | | ✓ | ✓ | -| Resource limits | | Partial | ✓ | -| Untrusted code | | | ✓ | -| Simple deployment | ✓ | ✓ | | +- **Trusted code only** - Code you wrote or fully control +- **Performance-critical** - When subprocess overhead matters +- **Debugging** - Easier to debug in single process +- **Simple scripts** - Quick experiments where isolation doesn't matter + +### When NOT to Use + +- **Untrusted agent code** - Use ContainerExecutor instead +- **Production with user-generated code** - Use ContainerExecutor +- **Long-running services** - Crashes take down your app +- **Multi-tenant** - No isolation between tenants + +### Risks + +| Risk | Consequence | +|------|-------------| +| Agent code crashes | Your entire application crashes | 
+| Agent code hangs | Your application may hang | +| Agent installs malicious package | Package runs in your process | +| Agent modifies global state | Affects your application state | + +--- ## Switching Executors -Executors are interchangeable - same code works with any executor: +Executors are interchangeable - the same Session code works with any executor: ```python from pathlib import Path import os +from py_code_mode import Session, FileStorage +from py_code_mode.execution import ( + SubprocessExecutor, SubprocessConfig, + ContainerExecutor, ContainerConfig, + InProcessExecutor, InProcessConfig, +) +storage = FileStorage(base_path=Path("./data")) tools_path = Path("./tools") -# Development: InProcess for speed -config = InProcessConfig(tools_path=tools_path) -executor = InProcessExecutor(config) -async with Session(storage=storage, executor=executor) as session: - result = await session.run(code) - -# Testing: Subprocess for isolation +# Development: Subprocess for safety (recommended) config = SubprocessConfig(tools_path=tools_path) executor = SubprocessExecutor(config) async with Session(storage=storage, executor=executor) as session: result = await session.run(code) -# Production: Container for security (with auth) +# Production: Container for maximum security config = ContainerConfig(tools_path=tools_path, auth_token=os.getenv("AUTH_TOKEN")) executor = ContainerExecutor(config) +async with Session(storage=storage, executor=executor) as session: + result = await session.run(code) + +# Trusted code only: InProcess for speed +config = InProcessConfig(tools_path=tools_path) +executor = InProcessExecutor(config) async with Session(storage=storage, executor=executor) as session: result = await session.run(code) ``` @@ -270,16 +314,16 @@ For ContainerExecutor and SubprocessExecutor, cleanup includes: ## Best Practices **Development:** -- Use InProcessExecutor for fast iteration -- Switch to SubprocessExecutor when testing isolation +- Use SubprocessExecutor for 
safe iteration with crash recovery +- Switch to InProcessExecutor only if debugging requires it **Production:** - Use ContainerExecutor for untrusted code +- Use SubprocessExecutor for trusted internal agents - Pre-configure dependencies with `allow_runtime_deps=False` - Set appropriate timeouts based on expected workload - Monitor executor health and resource usage **Testing:** -- Test with all executors to ensure compatibility -- Use SubprocessExecutor for integration tests +- Test with SubprocessExecutor to catch isolation issues early - Use ContainerExecutor to validate production behavior diff --git a/docs/getting-started.md b/docs/getting-started.md index e2a9adf..c7c93b1 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -45,14 +45,14 @@ The storage directory will contain `skills/` and `artifacts/` subdirectories. ```python from pathlib import Path from py_code_mode import Session, FileStorage -from py_code_mode.execution import InProcessConfig, InProcessExecutor +from py_code_mode.execution import SubprocessConfig, SubprocessExecutor # Create storage backend for skills and artifacts storage = FileStorage(base_path=Path("./data")) -# Configure executor with tools path -config = InProcessConfig(tools_path=Path("./tools")) -executor = InProcessExecutor(config=config) +# Configure executor with tools path (SubprocessExecutor recommended for most use cases) +config = SubprocessConfig(tools_path=Path("./tools")) +executor = SubprocessExecutor(config=config) # Create a session async with Session(storage=storage, executor=executor) as session: diff --git a/docs/production.md b/docs/production.md index d728619..025ab6f 100644 --- a/docs/production.md +++ b/docs/production.md @@ -18,20 +18,15 @@ from py_code_mode.execution import ContainerExecutor, ContainerConfig # Shared skill library storage = RedisStorage(url=os.getenv("REDIS_URL"), prefix="production") -# Isolated execution with authentication +# Isolated execution with authentication and 
pre-configured deps config = ContainerConfig( timeout=60.0, allow_runtime_deps=False, # Lock down package installation auth_token=os.getenv("CONTAINER_AUTH_TOKEN"), # Required for production + deps=["pandas>=2.0", "numpy", "requests"], # Pre-configured dependencies ) executor = ContainerExecutor(config) -# Pre-configure dependencies once -deps_store = storage.get_deps_store() -deps_store.add("pandas>=2.0") -deps_store.add("numpy") -deps_store.add("requests") - async with Session(storage=storage, executor=executor, sync_deps_on_start=True) as session: result = await session.run(agent_code) ``` @@ -63,13 +58,9 @@ Prevent agents from installing arbitrary packages: ```python config = ContainerConfig( - allow_runtime_deps=False # Block runtime installation + allow_runtime_deps=False, # Block runtime installation + deps=["pandas>=2.0", "requests>=2.28.0"], # Pre-configure allowed packages ) - -# Pre-configure allowed packages via storage -deps_store = storage.get_deps_store() -deps_store.add("pandas>=2.0") -deps_store.add("requests>=2.28.0") ``` ### 3. Use Container Isolation diff --git a/docs/skills.md b/docs/skills.md index 048c2b4..4654d4d 100644 --- a/docs/skills.md +++ b/docs/skills.md @@ -286,21 +286,17 @@ async with Session(storage=storage) as session: ) ``` -For host-side skill management without a session, use `PythonSkill.from_source()`: +For advanced use cases where you need to create skills outside of agent code execution, use `session.add_skill()`: ```python -from py_code_mode.skills import PythonSkill - -skill = PythonSkill.from_source( - name="greet", - source='''def run(name: str = "World") -> str: +async with Session(storage=storage, executor=executor) as session: + await session.add_skill( + name="greet", + source='''def run(name: str = "World") -> str: return f"Hello, {name}!" 
''', - description="Generate a greeting message" -) - -skill_store = storage.get_skill_store() -skill_store.save(skill) + description="Generate a greeting message" + ) ``` ## Best Practices diff --git a/docs/storage.md b/docs/storage.md index 36ba6a1..fab8e78 100644 --- a/docs/storage.md +++ b/docs/storage.md @@ -129,42 +129,24 @@ tenant_b_storage = RedisStorage(url="redis://localhost:6379", prefix="tenant-b") ## Migrating Between Storage Backends -### File to Redis - -```python -from pathlib import Path -from py_code_mode import FileStorage, RedisStorage - -# Load from file storage -file_storage = FileStorage(base_path=Path("./data")) -file_skill_store = file_storage.get_skill_store() -skills = file_skill_store.list_all() +Use the CLI tools for migration (recommended): -# Save to Redis storage -redis_storage = RedisStorage(url="redis://localhost:6379", prefix="production") -redis_skill_store = redis_storage.get_skill_store() +### File to Redis -for skill in skills: - redis_skill_store.save(skill) +```bash +python -m py_code_mode.store bootstrap \ + --source ./skills \ + --target redis://localhost:6379 \ + --prefix production ``` ### Redis to File -```python -from pathlib import Path -from py_code_mode import FileStorage, RedisStorage - -# Load from Redis -redis_storage = RedisStorage(url="redis://localhost:6379", prefix="production") -redis_skill_store = redis_storage.get_skill_store() -skills = redis_skill_store.list_all() - -# Save to file storage -file_storage = FileStorage(base_path=Path("./backup")) -file_skill_store = file_storage.get_skill_store() - -for skill in skills: - file_skill_store.save(skill) +```bash +python -m py_code_mode.store pull \ + --target redis://localhost:6379 \ + --prefix production \ + --dest ./skills-backup ``` --- @@ -244,7 +226,7 @@ Storage backends implement a common protocol, making them interchangeable: ```python from pathlib import Path from py_code_mode import Session, FileStorage, RedisStorage -from py_code_mode.execution 
import InProcessConfig, InProcessExecutor +from py_code_mode.execution import SubprocessConfig, SubprocessExecutor def create_session(storage_type: str, tools_path: Path): # Choose storage based on environment @@ -254,8 +236,8 @@ def create_session(storage_type: str, tools_path: Path): storage = RedisStorage(url="redis://localhost:6379", prefix="app") # Executor config is the same for both storage types - config = InProcessConfig(tools_path=tools_path) - executor = InProcessExecutor(config=config) + config = SubprocessConfig(tools_path=tools_path) + executor = SubprocessExecutor(config=config) return Session(storage=storage, executor=executor) ``` diff --git a/examples/minimal/README.md b/examples/minimal/README.md index 04e2d46..c40d36f 100644 --- a/examples/minimal/README.md +++ b/examples/minimal/README.md @@ -7,7 +7,7 @@ A simple Claude-powered agent with CLI tool execution. Shows the core py-code-mo The agent can: 1. Receive a task from you 2. Write Python code to accomplish it -3. Execute code with access to CLI tools via `tools.call()` +3. Execute code with access to CLI tools via `tools.<tool_name>(...)` 4. Iterate on results until it has an answer ## Prerequisites @@ -62,30 +62,37 @@ description: HTTP client for making requests args: "-s {url}" ``` -### 2. Storage + Session +### 2. Storage + Executor + Session -Tools and skills are loaded via storage abstraction: +Storage handles skills and artifacts. 
Tools come from executor config: ```python from pathlib import Path from py_code_mode import Session, FileStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig -# File-based storage +# File-based storage for skills and artifacts storage = FileStorage(base_path=Path("./configs")) -# Create session (defaults to in-process execution) -async with Session(storage=storage) as session: +# Executor config loads tools from tools_path +config = SubprocessConfig(tools_path=Path("./configs/tools")) +executor = SubprocessExecutor(config=config) + +async with Session(storage=storage, executor=executor) as session: result = await session.run('tools.curl(url="...")') ``` -Or with Redis: +Or with Redis storage: ```python from py_code_mode import Session, RedisStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig storage = RedisStorage(url="redis://localhost:6379", prefix="myapp") +config = SubprocessConfig(tools_path=Path("./tools")) +executor = SubprocessExecutor(config=config) -async with Session(storage=storage) as session: +async with Session(storage=storage, executor=executor) as session: result = await session.run('tools.curl(url="...")') ``` @@ -94,11 +101,11 @@ async with Session(storage=storage) as session: When the agent writes code, it has access to `tools`: ```python -# Pythonic style (recommended) +# Call tools with keyword arguments response = tools.curl(url="https://api.example.com") -# Or dict-based style -response = tools.call("curl", {"url": "https://api.example.com"}) +# Or use recipes for common patterns +response = tools.curl.get(url="https://api.example.com") import json data = json.loads(response) From e14ecf2044b6abd4aea94e0de47a33df17d91835 Mon Sep 17 00:00:00 2001 From: actae0n <19864268+xpcmdshell@users.noreply.github.com> Date: Fri, 2 Jan 2026 11:11:10 -0800 Subject: [PATCH 2/3] docs: add CLI reference, Session API reference, and integrations guide New documentation: - docs/cli-reference.md: MCP 
server flags and Store CLI commands - docs/session-api.md: Complete Session method reference - docs/integrations.md: Framework integration patterns Updated: - README.md: Reorganized documentation links, added new docs --- README.md | 12 +- docs/cli-reference.md | 267 +++++++++++++++++++++++++ docs/integrations.md | 252 +++++++++++++++++++++++ docs/session-api.md | 455 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 985 insertions(+), 1 deletion(-) create mode 100644 docs/cli-reference.md create mode 100644 docs/integrations.md create mode 100644 docs/session-api.md diff --git a/README.md b/README.md index 3e200cc..54ef238 100644 --- a/README.md +++ b/README.md @@ -111,21 +111,31 @@ For MCP server installation, see [Getting Started](./docs/getting-started.md). ## Documentation +**Getting Started:** - **[Getting Started](./docs/getting-started.md)** - Installation, first session, basic usage +- **[Session API](./docs/session-api.md)** - Complete Session method reference +- **[CLI Reference](./docs/cli-reference.md)** - MCP server and store CLI commands + +**Core Concepts:** - **[Tools](./docs/tools.md)** - CLI, MCP, and REST API adapters - **[Skills](./docs/skills.md)** - Creating, composing, and managing workflows - **[Artifacts](./docs/artifacts.md)** - Persistent data storage patterns - **[Dependencies](./docs/dependencies.md)** - Managing Python packages + +**Deployment:** - **[Executors](./docs/executors.md)** - Subprocess, Container, InProcess execution - **[Storage](./docs/storage.md)** - File vs Redis storage backends +- **[Integrations](./docs/integrations.md)** - Framework integration patterns - **[Production](./docs/production.md)** - Deployment and scaling patterns + +**Reference:** - **[Architecture](./docs/ARCHITECTURE.md)** - System design and separation of concerns ## Examples - **[minimal/](./examples/minimal/)** - Simple agent implementation (~100 lines) - **[subprocess/](./examples/subprocess/)** - Process isolation without Docker -- 
**[autogen-direct/](./examples/autogen-direct/)** - AutoGen framework integration +- **[deps/](./examples/deps/)** - Dependency management patterns - **[azure-container-apps/](./examples/azure-container-apps/)** - Production deployment ## License diff --git a/docs/cli-reference.md b/docs/cli-reference.md new file mode 100644 index 0000000..e83d418 --- /dev/null +++ b/docs/cli-reference.md @@ -0,0 +1,267 @@ +# CLI Reference + +Command-line tools for py-code-mode. + +## MCP Server + +The MCP server exposes py-code-mode to Claude Code and other MCP clients. + +### Installation + +```bash +# Add to Claude Code +claude mcp add py-code-mode -- uvx --from git+https://github.com/xpcmdshell/py-code-mode.git@v0.9.0 py-code-mode-mcp --base ~/.code-mode +``` + +### Usage + +```bash +py-code-mode-mcp [OPTIONS] +``` + +### Options + +| Flag | Description | Default | +|------|-------------|---------| +| `--base PATH` | Base directory with `tools/`, `skills/`, `artifacts/` subdirs | - | +| `--storage PATH` | Path to storage directory (skills, artifacts) | - | +| `--tools PATH` | Path to tools directory (YAML definitions) | - | +| `--redis URL` | Redis URL for storage | - | +| `--prefix PREFIX` | Redis key prefix | `py-code-mode` | +| `--timeout SECONDS` | Code execution timeout | unlimited | +| `--no-runtime-deps` | Disable runtime dependency installation | false | +| `--no-sync-deps` | Don't install pre-configured deps on startup | false | + +### Examples + +```bash +# Base directory (auto-discovers tools/, skills/, artifacts/) +py-code-mode-mcp --base ~/.code-mode + +# Explicit storage + tools paths +py-code-mode-mcp --storage ./data --tools ./project/tools + +# Redis storage with timeout +py-code-mode-mcp --redis redis://localhost:6379 --prefix my-agent --timeout 60 + +# Production: locked down deps +py-code-mode-mcp --base ~/.code-mode --no-runtime-deps +``` + +### Exposed MCP Tools + +When running, the server exposes these tools to MCP clients: + +| Tool | Description | 
+|------|-------------| +| `run_code` | Execute Python with access to tools, skills, artifacts, deps | +| `list_tools` | List available tools | +| `search_tools` | Semantic search for tools | +| `list_skills` | List available skills | +| `search_skills` | Semantic search for skills | +| `create_skill` | Save a new skill | +| `delete_skill` | Remove a skill | +| `list_artifacts` | List saved artifacts | +| `list_deps` | List configured dependencies | +| `add_dep` | Add and install a dependency (if `--no-runtime-deps` not set) | +| `remove_dep` | Remove a dependency (if `--no-runtime-deps` not set) | + +--- + +## Store CLI + +Manage skills, tools, and dependencies in Redis stores. + +### Usage + +```bash +python -m py_code_mode.cli.store [OPTIONS] +``` + +### Commands + +#### bootstrap + +Push skills, tools, or deps from local files to a store. + +```bash +python -m py_code_mode.cli.store bootstrap \ + --source PATH \ + --target URL \ + --prefix PREFIX \ + [--type skills|tools|deps] \ + [--clear] \ + [--deps "pkg1" "pkg2"] +``` + +| Option | Description | Default | +|--------|-------------|---------| +| `--source PATH` | Source directory or requirements file | required | +| `--target URL` | Target store URL (e.g., `redis://localhost:6379`) | required | +| `--prefix PREFIX` | Key prefix for items | `skills` | +| `--type TYPE` | Type of items: `skills`, `tools`, or `deps` | `skills` | +| `--clear` | Remove existing items before adding | false | +| `--deps` | Inline package specs (for deps only) | - | + +**Examples:** + +```bash +# Bootstrap skills +python -m py_code_mode.cli.store bootstrap \ + --source ./skills \ + --target redis://localhost:6379 \ + --prefix my-agent + +# Bootstrap tools +python -m py_code_mode.cli.store bootstrap \ + --source ./tools \ + --target redis://localhost:6379 \ + --prefix my-agent \ + --type tools + +# Bootstrap deps from requirements file +python -m py_code_mode.cli.store bootstrap \ + --source requirements.txt \ + --target 
redis://localhost:6379 \ + --prefix my-agent \ + --type deps + +# Bootstrap deps inline +python -m py_code_mode.cli.store bootstrap \ + --target redis://localhost:6379 \ + --prefix my-agent \ + --type deps \ + --deps "requests>=2.31" "pandas>=2.0" + +# Replace all existing skills +python -m py_code_mode.cli.store bootstrap \ + --source ./skills \ + --target redis://localhost:6379 \ + --prefix my-agent \ + --clear +``` + +#### list + +List items in a store. + +```bash +python -m py_code_mode.cli.store list \ + --target URL \ + --prefix PREFIX \ + [--type skills|tools|deps] +``` + +**Examples:** + +```bash +# List skills +python -m py_code_mode.cli.store list \ + --target redis://localhost:6379 \ + --prefix my-agent + +# List tools +python -m py_code_mode.cli.store list \ + --target redis://localhost:6379 \ + --prefix my-agent \ + --type tools + +# List deps +python -m py_code_mode.cli.store list \ + --target redis://localhost:6379 \ + --prefix my-agent \ + --type deps +``` + +#### pull + +Retrieve skills from a store to local files. + +```bash +python -m py_code_mode.cli.store pull \ + --target URL \ + --prefix PREFIX \ + --dest PATH +``` + +**Example:** + +```bash +# Pull skills to review agent-created ones +python -m py_code_mode.cli.store pull \ + --target redis://localhost:6379 \ + --prefix my-agent \ + --dest ./skills-from-redis +``` + +#### diff + +Compare local skills vs remote store. 
+ +```bash +python -m py_code_mode.cli.store diff \ + --source PATH \ + --target URL \ + --prefix PREFIX +``` + +**Example:** + +```bash +# See what agent added or changed +python -m py_code_mode.cli.store diff \ + --source ./skills \ + --target redis://localhost:6379 \ + --prefix my-agent +``` + +Output shows: +- `+ name` - Agent-created (in store, not local) +- `- name` - Removed from store (local only) +- `~ name` - Modified +- `= name` - Unchanged + +--- + +## CI/CD Patterns + +### Deploy Skills to Production + +```bash +# In CI pipeline +python -m py_code_mode.cli.store bootstrap \ + --source ./skills \ + --target $REDIS_URL \ + --prefix production \ + --clear +``` + +### Review Agent Creations + +```bash +# Pull what agents created +python -m py_code_mode.cli.store pull \ + --target $REDIS_URL \ + --prefix production \ + --dest ./review + +# Compare to source +python -m py_code_mode.cli.store diff \ + --source ./skills \ + --target $REDIS_URL \ + --prefix production +``` + +### Pre-configure Dependencies + +```bash +# Bootstrap deps to Redis +python -m py_code_mode.cli.store bootstrap \ + --source requirements.txt \ + --target $REDIS_URL \ + --prefix production \ + --type deps + +# Then run MCP server with --no-runtime-deps to lock it down +py-code-mode-mcp --redis $REDIS_URL --prefix production --no-runtime-deps +``` diff --git a/docs/integrations.md b/docs/integrations.md new file mode 100644 index 0000000..e0cc2a1 --- /dev/null +++ b/docs/integrations.md @@ -0,0 +1,252 @@ +# Framework Integrations + +py-code-mode integrates with agent frameworks via two approaches: MCP protocol or direct SDK. + +## Integration Approaches + +| Approach | Best For | Complexity | +|----------|----------|------------| +| **MCP** | Any MCP-capable framework, quick setup | Low | +| **Direct SDK** | Custom control, lower latency | Medium | + +--- + +## MCP Integration + +The MCP server exposes py-code-mode as a standard Model Context Protocol server. 
Any MCP-capable framework can connect. + +### Setup + +```bash +# Install and run MCP server +py-code-mode-mcp --base ~/.code-mode +``` + +### Available Tools + +The MCP server exposes these tools: + +| Tool | Purpose | +|------|---------| +| `run_code` | Execute Python with access to tools/skills/artifacts | +| `list_tools` / `search_tools` | Discover available tools | +| `list_skills` / `search_skills` | Discover available skills | +| `create_skill` / `delete_skill` | Manage skills | +| `list_artifacts` | List saved data | +| `list_deps` / `add_dep` / `remove_dep` | Manage dependencies | + +### Framework Examples + +#### Claude Code + +```bash +claude mcp add py-code-mode -- uvx --from git+https://github.com/xpcmdshell/py-code-mode.git@v0.9.0 py-code-mode-mcp --base ~/.code-mode +``` + +#### Generic MCP Client + +```python +import subprocess +import os + +# Start MCP server as subprocess (expanduser: Popen does not expand "~") +proc = subprocess.Popen( + ["py-code-mode-mcp", "--base", os.path.expanduser("~/.code-mode")], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, +) + +# Send MCP messages via stdio +# (Use your framework's MCP client library for proper protocol handling) +``` + +--- + +## Direct SDK Integration + +For frameworks that don't support MCP or need lower latency, use the Session API directly. 
+ +### Basic Pattern + +```python +from pathlib import Path +from py_code_mode import Session, FileStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig + +# Create session +storage = FileStorage(base_path=Path("./data")) +config = SubprocessConfig(tools_path=Path("./tools")) +executor = SubprocessExecutor(config=config) + +async def execute_agent_code(code: str) -> str: + """Execute code from your agent framework.""" + async with Session(storage=storage, executor=executor) as session: + result = await session.run(code) + + if result.is_ok: + output = str(result.value) if result.value is not None else "" + if result.stdout: + output = f"{result.stdout}\n{output}" if output else result.stdout + return output + else: + return f"Error: {result.error}" +``` + +### Persistent Session Pattern + +For agents that make multiple code execution calls, keep the session open: + +```python +class CodeExecutionTool: + """Reusable code execution tool for agent frameworks.""" + + def __init__(self, storage_path: Path, tools_path: Path): + self.storage = FileStorage(base_path=storage_path) + config = SubprocessConfig(tools_path=tools_path) + self.executor = SubprocessExecutor(config=config) + self.session: Session | None = None + + async def start(self): + """Initialize session. Call before agent loop.""" + self.session = Session(storage=self.storage, executor=self.executor) + await self.session.start() + + async def stop(self): + """Cleanup. Call after agent loop.""" + if self.session: + await self.session.close() + self.session = None + + async def run(self, code: str, timeout: float = 30.0) -> str: + """Execute code. 
Variables persist across calls.""" + if not self.session: + raise RuntimeError("Call start() first") + + result = await self.session.run(code, timeout=timeout) + + if result.is_ok: + output = str(result.value) if result.value is not None else "" + if result.stdout: + output = f"{result.stdout}\n{output}" if output else result.stdout + return output or "(no output)" + else: + return f"Error: {result.error}" + + async def __aenter__(self): + await self.start() + return self + + async def __aexit__(self, *args): + await self.stop() +``` + +**Usage:** + +```python +async with CodeExecutionTool(Path("./data"), Path("./tools")) as tool: + # Variables persist across calls + await tool.run("x = 42") + result = await tool.run("x * 2") # Returns "84" +``` + +### Tool Definition for LLM + +When registering with your framework, provide a clear tool description: + +```python +TOOL_DESCRIPTION = """Execute Python code with access to tools, skills, and artifacts. + +NAMESPACES: +- tools.* - Call registered tools (e.g., tools.curl.get(url="...")) +- skills.* - Invoke reusable workflows (e.g., skills.invoke("fetch_json", url="...")) +- artifacts.* - Persist data (e.g., artifacts.save("key", data)) +- deps.* - Manage packages (e.g., deps.add("pandas")) + +Variables persist across calls within the same session. + +WORKFLOW: +1. Search for existing skills: skills.search("your task") +2. If found, invoke it: skills.invoke("skill_name", arg=value) +3. Otherwise, write code using tools +4. 
Save successful workflows as skills for reuse +""" +``` + +--- + +## Redis Backend for Multi-Agent + +When running multiple agent instances, use Redis for shared skill library: + +```python +from py_code_mode import Session, RedisStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig + +# All instances share skills via Redis +storage = RedisStorage(url="redis://localhost:6379", prefix="my-agents") +config = SubprocessConfig(tools_path=Path("./tools")) +executor = SubprocessExecutor(config=config) + +async with Session(storage=storage, executor=executor) as session: + # Skills created by any agent are available to all + result = await session.run(code) +``` + +--- + +## Production Patterns + +### Container Isolation + +For untrusted agent code, use ContainerExecutor: + +```python +from py_code_mode.execution import ContainerExecutor, ContainerConfig + +config = ContainerConfig( + tools_path=Path("./tools"), + auth_token=os.getenv("CONTAINER_AUTH_TOKEN"), + timeout=60.0, + allow_runtime_deps=False, +) +executor = ContainerExecutor(config) +``` + +### Timeout Handling + +```python +async def safe_execute(session: Session, code: str) -> str: + try: + result = await session.run(code, timeout=30.0) + if result.is_ok: + return str(result.value) + return f"Error: {result.error}" + except TimeoutError: + return "Error: Execution timed out" +``` + +### Error Recovery + +```python +async def execute_with_retry(session: Session, code: str, retries: int = 2) -> str: + for attempt in range(retries + 1): + result = await session.run(code) + if result.is_ok: + return str(result.value) + + # Reset on failure to clear potentially corrupted state + if attempt < retries: + await session.reset() + + return f"Error after {retries + 1} attempts: {result.error}" +``` + +--- + +## Examples + +See working integration examples: + +- **[examples/minimal/](../examples/minimal/)** - Simple agent (~100 lines) +- **[examples/subprocess/](../examples/subprocess/)** - 
SubprocessExecutor usage +- **[examples/azure-container-apps/](../examples/azure-container-apps/)** - Production deployment diff --git a/docs/session-api.md b/docs/session-api.md new file mode 100644 index 0000000..a5b7a06 --- /dev/null +++ b/docs/session-api.md @@ -0,0 +1,455 @@ +# Session API Reference + +Complete reference for the Session class - the primary interface for py-code-mode. + +## Overview + +Session wraps a storage backend and executor, providing a unified API for code execution with tools, skills, and artifacts. + +```python +from pathlib import Path +from py_code_mode import Session, FileStorage +from py_code_mode.execution import SubprocessExecutor, SubprocessConfig + +storage = FileStorage(base_path=Path("./data")) +config = SubprocessConfig(tools_path=Path("./tools")) +executor = SubprocessExecutor(config=config) + +async with Session(storage=storage, executor=executor) as session: + result = await session.run("tools.curl.get(url='https://api.github.com')") +``` + +--- + +## Constructor + +```python +Session( + storage: StorageBackend, + executor: Executor | None = None, + sync_deps_on_start: bool = False, +) +``` + +| Parameter | Type | Description | +|-----------|------|-------------| +| `storage` | `StorageBackend` | Required. FileStorage or RedisStorage instance. | +| `executor` | `Executor` | Optional. Defaults to SubprocessExecutor if not provided. | +| `sync_deps_on_start` | `bool` | If True, install pre-configured deps when session starts. | + +--- + +## Lifecycle Methods + +### start() + +Initialize the executor and prepare for code execution. + +```python +async def start(self) -> None +``` + +Called automatically when using `async with`. Only call manually if not using context manager. + +### close() + +Release session resources. + +```python +async def close(self) -> None +``` + +Called automatically when exiting `async with`. Only call manually if not using context manager. 
+ +### reset() + +Reset the execution environment, clearing user-defined variables while preserving namespaces. + +```python +async def reset(self) -> None +``` + +**Example:** + +```python +async with Session(storage=storage, executor=executor) as session: + await session.run("x = 42") + await session.reset() + result = await session.run("x") # Error: x is not defined +``` + +--- + +## Code Execution + +### run() + +Execute Python code and return the result. + +```python +async def run( + self, + code: str, + timeout: float | None = None +) -> ExecutionResult +``` + +| Parameter | Type | Description | +|-----------|------|-------------| +| `code` | `str` | Python code to execute | +| `timeout` | `float` | Optional timeout in seconds (overrides default) | + +**Returns:** `ExecutionResult` with: +- `value` - Return value of the last expression +- `stdout` - Captured stdout +- `error` - Error message if execution failed +- `is_ok` - True if no error + +**Example:** + +```python +result = await session.run(''' +import json +data = tools.curl.get(url="https://api.github.com/users/octocat") +json.loads(data)["public_repos"] +''') + +if result.is_ok: + print(f"Repos: {result.value}") +else: + print(f"Error: {result.error}") +``` + +--- + +## Capability Query + +### supports() + +Check if the session supports a specific capability. + +```python +def supports(self, capability: str) -> bool +``` + +**Example:** + +```python +if session.supports("timeout"): + result = await session.run(code, timeout=30.0) +``` + +### supported_capabilities() + +Get all capabilities supported by the current executor. 
+ +```python +def supported_capabilities(self) -> set[str] +``` + +**Example:** + +```python +caps = session.supported_capabilities() +# {'timeout', 'process_isolation', 'reset', ...} +``` + +**Available capabilities:** + +| Capability | Description | +|------------|-------------| +| `timeout` | Supports execution timeout | +| `process_isolation` | Code runs in separate process | +| `container_isolation` | Code runs in container | +| `network_isolation` | Can disable network access | +| `reset` | Supports environment reset | +| `deps_install` | Can install dependencies | + +--- + +## Tools Methods + +### list_tools() + +List all available tools. + +```python +async def list_tools(self) -> list[dict[str, Any]] +``` + +**Returns:** List of tool info dicts with `name`, `description`, `tags`. + +**Example:** + +```python +tools = await session.list_tools() +for tool in tools: + print(f"{tool['name']}: {tool['description']}") +``` + +### search_tools() + +Search tools by keyword or semantic similarity. + +```python +async def search_tools( + self, + query: str, + limit: int = 10 +) -> list[dict[str, Any]] +``` + +**Example:** + +```python +http_tools = await session.search_tools("make HTTP requests") +``` + +--- + +## Skills Methods + +### list_skills() + +List all available skills. + +```python +async def list_skills(self) -> list[dict[str, Any]] +``` + +**Returns:** List of skill summaries (name, description, parameters - no source). + +### search_skills() + +Search skills by semantic similarity. + +```python +async def search_skills( + self, + query: str, + limit: int = 5 +) -> list[dict[str, Any]] +``` + +**Example:** + +```python +skills = await session.search_skills("fetch GitHub repository data") +``` + +### get_skill() + +Get a specific skill by name, including source code. + +```python +async def get_skill(self, name: str) -> dict[str, Any] | None +``` + +**Returns:** Skill dict with `name`, `description`, `parameters`, `source`, or None if not found. 
+ +**Example:** + +```python +skill = await session.get_skill("fetch_json") +if skill: + print(skill["source"]) +``` + +### add_skill() + +Create and persist a new skill. + +```python +async def add_skill( + self, + name: str, + source: str, + description: str +) -> dict[str, Any] +``` + +**Example:** + +```python +await session.add_skill( + name="fetch_json", + source='''def run(url: str) -> dict: + import json + response = tools.curl.get(url=url) + return json.loads(response) +''', + description="Fetch and parse JSON from a URL" +) +``` + +### remove_skill() + +Remove a skill by name. + +```python +async def remove_skill(self, name: str) -> bool +``` + +**Returns:** True if removed, False if not found. + +--- + +## Artifacts Methods + +### list_artifacts() + +List all stored artifacts. + +```python +async def list_artifacts(self) -> list[dict[str, Any]] +``` + +**Returns:** List of artifact info with `name`, `path`, `description`, `metadata`, `created_at`. + +### save_artifact() + +Save data as an artifact. + +```python +async def save_artifact( + self, + name: str, + data: Any, + description: str = "", + metadata: dict[str, Any] | None = None +) -> dict[str, Any] +``` + +**Example:** + +```python +await session.save_artifact( + name="analysis_results", + data={"repos": 42, "stars": 1000}, + description="GitHub analysis results" +) +``` + +### load_artifact() + +Load artifact data by name. + +```python +async def load_artifact(self, name: str) -> Any +``` + +**Example:** + +```python +data = await session.load_artifact("analysis_results") +``` + +### delete_artifact() + +Delete an artifact. + +```python +async def delete_artifact(self, name: str) -> None +``` + +--- + +## Dependencies Methods + +### list_deps() + +List configured dependencies. + +```python +async def list_deps(self) -> list[str] +``` + +### add_dep() + +Add and install a dependency. 
+ +```python +async def add_dep(self, package: str) -> dict[str, Any] +``` + +**Returns:** Dict with `installed`, `already_present`, `failed` keys. + +**Example:** + +```python +result = await session.add_dep("pandas>=2.0") +if result.get("installed"): + print("pandas installed") +``` + +### remove_dep() + +Remove a dependency. + +```python +async def remove_dep(self, package: str) -> dict[str, Any] +``` + +**Returns:** Dict with `removed`, `not_found`, `failed`, `removed_from_config` keys. + +### sync_deps() + +Install all pre-configured dependencies. + +```python +async def sync_deps(self) -> dict[str, Any] +``` + +**Returns:** Dict with `installed`, `already_present`, `failed` keys. + +**Example:** + +```python +# Manually sync deps (alternative to sync_deps_on_start=True) +result = await session.sync_deps() +print(f"Installed: {result['installed']}") +``` + +--- + +## Properties + +### storage + +Access the underlying storage backend. + +```python +@property +def storage(self) -> StorageBackend +``` + +**Example:** + +```python +# Access storage for advanced operations +skill_library = session.storage.get_skill_library() +``` + +--- + +## Context Manager + +Session implements async context manager for automatic lifecycle management: + +```python +async with Session(storage=storage, executor=executor) as session: + # session.start() called automatically + result = await session.run(code) + # session.close() called automatically on exit +``` + +This is the recommended pattern. 
Manual lifecycle management: + +```python +session = Session(storage=storage, executor=executor) +await session.start() +try: + result = await session.run(code) +finally: + await session.close() +``` From f4513d24ea9e462cac5dc53d19ac81f032abf28c Mon Sep 17 00:00:00 2001 From: actae0n <19864268+xpcmdshell@users.noreply.github.com> Date: Fri, 2 Jan 2026 11:18:29 -0800 Subject: [PATCH 3/3] docs: fix namespace count (four, not three), add features list to README --- README.md | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 54ef238..d6f968e 100644 --- a/README.md +++ b/README.md @@ -66,13 +66,23 @@ skills.create( claude mcp add py-code-mode -- uvx --from git+https://github.com/xpcmdshell/py-code-mode.git@v0.9.0 py-code-mode-mcp ``` -## Three Namespaces +## Features -When agents write code, three namespaces are available: +- **Skill persistence** - Save working code as reusable skills, invoke later without re-planning +- **Semantic search** - Find relevant skills and tools by natural language description +- **Tool integration** - Wrap CLI commands, MCP servers, and HTTP APIs as callable functions +- **Process isolation** - SubprocessExecutor runs code in a separate process with clean venv +- **Multiple storage backends** - FileStorage for local dev, RedisStorage for distributed deployments +- **Runtime dependency management** - Install packages on-demand or pre-configure for lockdown + +## Four Namespaces + +When agents write code, four namespaces are available: **tools**: CLI commands, MCP servers, and REST APIs wrapped as callable functions **skills**: Reusable Python workflows with semantic search **artifacts**: Persistent data storage across sessions +**deps**: Runtime Python package management ```python # Tools: external capabilities @@ -90,6 +100,10 @@ def run(repos: list) -> dict: # Artifacts: persistent storage artifacts.save("results", data) cached = artifacts.load("results") + +# Deps: runtime 
package management +deps.add("pandas>=2.0") +deps.list() ``` For programmatic access without code strings, Session also provides facade methods: