diff --git a/pyproject.toml b/pyproject.toml index a061fc21..d641fa4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ dependencies = [ "boto3>=1.40.52", "botocore>=1.40.52", "pydantic>=2.0.0,<2.41.3", + "pyyaml>=6.0", "urllib3>=1.26.0", "starlette>=0.46.2", "typing-extensions>=4.13.2,<5.0.0", diff --git a/src/bedrock_agentcore/__init__.py b/src/bedrock_agentcore/__init__.py index a77472f5..c4bfb9bb 100644 --- a/src/bedrock_agentcore/__init__.py +++ b/src/bedrock_agentcore/__init__.py @@ -1,11 +1,13 @@ """BedrockAgentCore Runtime SDK - A Python SDK for building and deploying AI agents.""" +from .project import Project from .runtime import BedrockAgentCoreApp, BedrockAgentCoreContext, RequestContext from .runtime.models import PingStatus __all__ = [ "BedrockAgentCoreApp", + "PingStatus", + "Project", "RequestContext", "BedrockAgentCoreContext", - "PingStatus", ] diff --git a/src/bedrock_agentcore/memory/__init__.py b/src/bedrock_agentcore/memory/__init__.py index 6a7a3af7..f5b26bd3 100644 --- a/src/bedrock_agentcore/memory/__init__.py +++ b/src/bedrock_agentcore/memory/__init__.py @@ -2,6 +2,14 @@ from .client import MemoryClient from .controlplane import MemoryControlPlaneClient +from .memory import Memory from .session import Actor, MemorySession, MemorySessionManager -__all__ = ["Actor", "MemoryClient", "MemorySession", "MemorySessionManager", "MemoryControlPlaneClient"] +__all__ = [ + "Actor", + "Memory", + "MemoryClient", + "MemoryControlPlaneClient", + "MemorySession", + "MemorySessionManager", +] diff --git a/src/bedrock_agentcore/memory/client.py b/src/bedrock_agentcore/memory/client.py index 923c1e01..316c0990 100644 --- a/src/bedrock_agentcore/memory/client.py +++ b/src/bedrock_agentcore/memory/client.py @@ -53,6 +53,8 @@ class MemoryClient: "get_event", "delete_event", "list_events", + "list_actors", + "list_sessions", } # AgentCore Memory control plane methods @@ -779,6 +781,87 @@ def my_llm(user_input: str, memories: List[Dict]) -> str: logger.info("Completed full conversation turn with LLM") return retrieved_memories, agent_response, event + def list_actors(self, memory_id: str, max_results: int = 100) -> List[Dict[str, Any]]: + """List all actors who have events in a memory. + + Args: + memory_id: The memory resource ID + max_results: Maximum number of actors to return (default: 100) + + Returns: + List of actor summary dictionaries + + Example: + actors = client.list_actors(memory_id="mem-123") + for actor in actors: + print(f"Actor: {actor['actorId']}") + """ + logger.debug("Listing actors for memory: %s", memory_id) + try: + actors: List[Dict[str, Any]] = [] + next_token = None + + while len(actors) < max_results: + params: Dict[str, Any] = {"memoryId": memory_id} + if next_token: + params["nextToken"] = next_token + + response = self.gmdp_client.list_actors(**params) + batch = response.get("actorSummaries", []) + actors.extend(batch) + + next_token = response.get("nextToken") + if not next_token or len(actors) >= max_results: + break + + logger.debug("Found %d actors", len(actors)) + return actors[:max_results] + + except ClientError as e: + logger.error("Failed to list actors: %s", e) + raise + + def list_sessions(self, memory_id: str, actor_id: str, max_results: int = 100) -> List[Dict[str, Any]]: + """List all sessions for an actor. 
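+
+        Results are paginated internally via ``nextToken`` until ``max_results``
+        session summaries have been collected or no further pages remain.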
+ + Args: + memory_id: The memory resource ID + actor_id: The actor ID + max_results: Maximum number of sessions to return (default: 100) + + Returns: + List of session summary dictionaries + + Example: + sessions = client.list_sessions(memory_id="mem-123", actor_id="user-456") + for session in sessions: + print(f"Session: {session['sessionId']}") + """ + logger.debug("Listing sessions for actor: %s in memory: %s", actor_id, memory_id) + try: + sessions: List[Dict[str, Any]] = [] + next_token = None + + while len(sessions) < max_results: + params: Dict[str, Any] = {"memoryId": memory_id, "actorId": actor_id} + if next_token: + params["nextToken"] = next_token + + response = self.gmdp_client.list_sessions(**params) + batch = response.get("sessionSummaries", []) + sessions.extend(batch) + + next_token = response.get("nextToken") + if not next_token or len(sessions) >= max_results: + break + + logger.debug("Found %d sessions", len(sessions)) + return sessions[:max_results] + + except ClientError as e: + logger.error("Failed to list sessions: %s", e) + raise + def list_events( self, memory_id: str, @@ -1852,6 +1935,104 @@ def wait_for_memories( logger.info("Note: Encountered %d service errors during polling", service_errors) return False + def enable_observability( + self, + memory_id: str, + memory_arn: Optional[str] = None, + enable_logs: bool = True, + enable_traces: bool = True, + ) -> Dict[str, Any]: + """Enable CloudWatch observability for a memory resource. + + This method sets up CloudWatch Logs delivery for memory APPLICATION_LOGS + and optionally X-Ray delivery for TRACES. + + Args: + memory_id: The memory resource ID + memory_arn: Optional memory ARN (constructed from memory_id if not provided) + enable_logs: Whether to enable APPLICATION_LOGS delivery (default: True) + enable_traces: Whether to enable TRACES delivery to X-Ray (default: True) + + Returns: + Dictionary with status and configuration details: + { + "status": "success" | "failed", + "log_group": str, # CloudWatch log group name + "error": str # Only present if status is "failed" + } + + Example: + result = client.enable_observability( + memory_id="mem-123", + enable_logs=True, + enable_traces=True + ) + if result["status"] == "success": + print(f"Logs available at: {result['log_group']}") + + Note: + This method requires CloudWatch Logs permissions: + - logs:CreateLogGroup + - logs:PutDeliverySource + - logs:PutDeliveryDestination + - logs:CreateDelivery + """ + # TODO: Implement ObservabilityDeliveryManager + # Reference implementation: + # https://github.com/aws/bedrock-agentcore-starter-toolkit/blob/main/src/bedrock_agentcore_starter_toolkit/operations/observability/delivery.py + raise NotImplementedError( + "enable_observability() is not yet implemented. " + "See starter-toolkit for reference: " + "https://github.com/aws/bedrock-agentcore-starter-toolkit/blob/main/src/bedrock_agentcore_starter_toolkit/operations/observability/delivery.py" + ) + + def disable_observability( + self, + memory_id: str, + delete_log_group: bool = False, + ) -> Dict[str, Any]: + """Disable CloudWatch observability for a memory resource. + + This method removes the CloudWatch Logs delivery infrastructure + for the specified memory. 
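+
+        This method is currently a placeholder and raises ``NotImplementedError``;
+        see the starter-toolkit reference linked in the implementation for a
+        working delivery manager.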
+ + Args: + memory_id: The memory resource ID + delete_log_group: Whether to also delete the CloudWatch log group (default: False) + + Returns: + Dictionary with status and any errors: + { + "status": "success" | "partial", + "errors": List[str] # Only present if status is "partial" + } + + Example: + # Disable delivery but keep logs + result = client.disable_observability(memory_id="mem-123") + + # Disable delivery and delete log group + result = client.disable_observability( + memory_id="mem-123", + delete_log_group=True + ) + + Note: + This method requires CloudWatch Logs permissions: + - logs:DeleteDeliverySource + - logs:DeleteDeliveryDestination + - logs:DeleteDelivery + - logs:DeleteLogGroup (if delete_log_group=True) + """ + # TODO: Implement ObservabilityDeliveryManager + # Reference implementation: + # https://github.com/aws/bedrock-agentcore-starter-toolkit/blob/main/src/bedrock_agentcore_starter_toolkit/operations/observability/delivery.py + raise NotImplementedError( + "disable_observability() is not yet implemented. " + "See starter-toolkit for reference: " + "https://github.com/aws/bedrock-agentcore-starter-toolkit/blob/main/src/bedrock_agentcore_starter_toolkit/operations/observability/delivery.py" + ) + def add_strategy(self, memory_id: str, strategy: Dict[str, Any]) -> Dict[str, Any]: """Add a strategy to a memory (without waiting). @@ -1873,6 +2054,59 @@ def add_strategy(self, memory_id: str, strategy: Dict[str, Any]) -> Dict[str, An ) return self._add_strategy(memory_id, strategy) + def add_strategy_and_wait( + self, + memory_id: str, + strategy: Dict[str, Any], + max_wait: int = 300, + poll_interval: int = 10, + ) -> Dict[str, Any]: + """Add a strategy to a memory and wait for it to return to ACTIVE state. + + This is a generic method that accepts any strategy type as a dictionary. + For typed convenience methods, use add_semantic_strategy_and_wait(), + add_summary_strategy_and_wait(), etc. + + Args: + memory_id: Memory resource ID + strategy: Strategy configuration dictionary (e.g., {"semanticMemoryStrategy": {...}}) + max_wait: Maximum seconds to wait (default: 300) + poll_interval: Seconds between status checks (default: 10) + + Returns: + Updated memory object in ACTIVE state + + Example: + # Add a semantic strategy + memory = client.add_strategy_and_wait( + memory_id="mem-123", + strategy={ + "semanticMemoryStrategy": { + "name": "my-strategy", + "description": "Extract key facts", + "namespaces": ["facts/{actorId}/{sessionId}/"] + } + } + ) + + # Add a custom strategy + memory = client.add_strategy_and_wait( + memory_id="mem-123", + strategy={ + "customMemoryStrategy": { + "name": "custom-strategy", + "configuration": {...} + } + } + ) + """ + return self.update_memory_strategies_and_wait( + memory_id=memory_id, + add_strategies=[strategy], + max_wait=max_wait, + poll_interval=poll_interval, + ) + # Private methods def _normalize_memory_response(self, memory: Dict[str, Any]) -> Dict[str, Any]: diff --git a/src/bedrock_agentcore/memory/config.py b/src/bedrock_agentcore/memory/config.py new file mode 100644 index 00000000..5e83e423 --- /dev/null +++ b/src/bedrock_agentcore/memory/config.py @@ -0,0 +1,52 @@ +"""Configuration models for Bedrock AgentCore Memory. + +This module provides Pydantic models for Memory configuration +with YAML serialization support. 
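+
+A minimal construction sketch (illustrative values only)::
+
+    config = MemoryConfigModel(
+        name="my-memory",
+        strategies=[
+            StrategyConfigModel(type=StrategyType.SEMANTIC, namespace="facts/{sessionId}/"),
+        ],
+    )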
+""" + +from enum import Enum +from typing import Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class StrategyType(str, Enum): + """Memory strategy types.""" + + SEMANTIC = "SEMANTIC" + SUMMARY = "SUMMARY" + USER_PREFERENCE = "USER_PREFERENCE" + CUSTOM_SEMANTIC = "CUSTOM_SEMANTIC" + + +class StrategyConfigModel(BaseModel): + """Memory strategy configuration.""" + + model_config = ConfigDict(populate_by_name=True) + + strategy_type: StrategyType = Field(alias="type") + namespace: str + custom_prompt: Optional[str] = Field(default=None, alias="customPrompt") + + +class MemoryConfigModel(BaseModel): + """Complete memory configuration model. + + This model represents the configuration for a Bedrock AgentCore memory, + suitable for YAML serialization and deserialization. + + Attributes: + name: Unique memory name + description: Optional description + strategies: List of memory extraction strategies + encryption_key_arn: Optional KMS key ARN for encryption + tags: Resource tags + """ + + model_config = ConfigDict(populate_by_name=True) + + name: str + description: Optional[str] = None + strategies: Optional[List[StrategyConfigModel]] = None + encryption_key_arn: Optional[str] = Field(default=None, alias="encryptionKeyArn") + tags: Optional[Dict[str, str]] = None diff --git a/src/bedrock_agentcore/memory/memory.py b/src/bedrock_agentcore/memory/memory.py new file mode 100644 index 00000000..99a1b28e --- /dev/null +++ b/src/bedrock_agentcore/memory/memory.py @@ -0,0 +1,433 @@ +"""Memory class for managing Bedrock AgentCore Memory resources. + +This module provides a high-level Memory class that wraps memory operations +for Bedrock AgentCore Memory resources. +""" + +import logging +import time +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from botocore.exceptions import ClientError + +from .client import MemoryClient +from .config import MemoryConfigModel, StrategyConfigModel, StrategyType + +if TYPE_CHECKING: + from .session import MemorySession + +logger = logging.getLogger(__name__) + + +class Memory: + """Represents a Bedrock AgentCore Memory resource. + + Each Memory instance manages a single memory resource. Use Project.from_json() + to load memories from configuration files. + + Example: + # Create with config + memory = Memory( + name="my-memory", + strategies=[{"type": "SEMANTIC", "namespace": "facts/{sessionId}/"}] + ) + memory.launch() + + # Get a session for conversational operations + session = memory.get_session(actor_id="user-123", session_id="sess-456") + + Attributes: + name: Memory name + config: Memory configuration model + memory_id: ID of created memory resource (if created) + is_active: Whether the memory is active + """ + + def __init__( + self, + name: str, + description: Optional[str] = None, + strategies: Optional[List[Dict[str, Any]]] = None, + encryption_key_arn: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + region: Optional[str] = None, + ): + """Create a Memory instance with full configuration. 
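+
+        Each entry in ``strategies`` is a dict with a ``"type"`` and a
+        ``"namespace"`` key, plus an optional ``"customPrompt"``.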
+ + Args: + name: Unique memory name + description: Optional description + strategies: List of strategy configs [{"type": "SEMANTIC", "namespace": "..."}] + encryption_key_arn: Optional KMS key ARN for encryption + tags: Resource tags + region: AWS region (defaults to boto3 default or us-west-2) + """ + self._name = name + self._region = region + self._memory_id: Optional[str] = None + + # Build config model + strategy_models = None + if strategies: + strategy_models = [ + StrategyConfigModel( + type=StrategyType(s["type"]), + namespace=s["namespace"], + customPrompt=s.get("customPrompt"), + ) + for s in strategies + ] + + self._config = MemoryConfigModel( + name=name, + description=description, + strategies=strategy_models, + encryptionKeyArn=encryption_key_arn, + tags=tags, + ) + + # Initialize client + self._client = MemoryClient(region_name=region) + + logger.info("Initialized Memory '%s' in region %s", name, self._client.region_name) + + # ==================== PROPERTIES ==================== + + @property + def name(self) -> str: + """Memory name.""" + return self._name + + @property + def config(self) -> MemoryConfigModel: + """Current configuration.""" + return self._config + + @property + def memory_id(self) -> Optional[str]: + """Memory ID if created.""" + return self._memory_id + + @property + def is_active(self) -> bool: + """Whether memory is active.""" + if not self._memory_id: + return False + try: + status = self._client.get_memory_status(self._memory_id) + return status == "ACTIVE" + except ClientError: + return False + + # ==================== OPERATIONS ==================== + + def launch( + self, + max_wait: int = 600, + poll_interval: int = 10, + ) -> Dict[str, Any]: + """Launch the memory resource in AWS (create if not exists). + + This method is idempotent - it will create the memory if it doesn't exist, + or return the existing memory if it already exists. + + To update strategies on an existing memory, use add_strategy(). + + Waits for the memory to become ACTIVE before returning. + + Args: + max_wait: Max seconds to wait + poll_interval: Seconds between status checks + + Returns: + Memory details + + Raises: + ClientError: If AWS API call fails + TimeoutError: If wait times out + """ + # Check if memory already exists + self._refresh_memory_state() + + if self._memory_id: + # Memory exists - return current state + logger.info("Memory '%s' already exists with ID: %s", self._name, self._memory_id) + return self._client.get_memory(self._memory_id) + + # Convert strategies to API format + strategies = [] + if self._config.strategies: + for s in self._config.strategies: + strategy = { + "memoryStrategyType": s.strategy_type.value, + "namespace": s.namespace, + } + if s.custom_prompt: + strategy["customPrompt"] = s.custom_prompt + strategies.append(strategy) + + # Memory doesn't exist - create it + logger.info("Creating memory '%s'...", self._name) + memory = self._client.create_memory_and_wait( + name=self._name, + strategies=strategies, + description=self._config.description, + max_wait=max_wait, + poll_interval=poll_interval, + ) + self._memory_id = memory.get("memoryId", memory.get("id")) + logger.info("Created memory with ID: %s", self._memory_id) + + return memory + + def delete(self, max_wait: int = 300, poll_interval: int = 10) -> Dict[str, Any]: + """Delete the memory resource from AWS. + + Waits for deletion to complete before returning. 
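+
+        If the memory has not been launched, this is a no-op that returns
+        ``{"status": "NOT_CREATED"}``.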
+ + Args: + max_wait: Max seconds to wait + poll_interval: Seconds between status checks + + Returns: + Deletion result + + Raises: + ClientError: If AWS API call fails + """ + if not self._memory_id: + logger.warning("Memory '%s' is not created, nothing to delete", self._name) + return {"status": "NOT_CREATED"} + + logger.info("Deleting memory '%s'...", self._name) + + try: + response = self._client.delete_memory(memory_id=self._memory_id) + + self._wait_for_deleted(max_wait, poll_interval) + + # Clear state + self._memory_id = None + + logger.info("Memory '%s' deleted", self._name) + return response + + except ClientError as e: + if e.response["Error"]["Code"] == "ResourceNotFoundException": + logger.warning("Memory '%s' not found, may already be deleted", self._name) + self._memory_id = None + return {"status": "NOT_FOUND"} + raise + + def add_strategy( + self, + strategy_type: str, + namespace: str, + custom_prompt: Optional[str] = None, + max_wait: int = 300, + poll_interval: int = 10, + ) -> Dict[str, Any]: + """Add a strategy to the memory. + + Waits for the update to complete before returning. + + Args: + strategy_type: Strategy type (SEMANTIC, SUMMARY, USER_PREFERENCE, CUSTOM_SEMANTIC) + namespace: Namespace for the strategy + custom_prompt: Custom extraction prompt (for CUSTOM_SEMANTIC) + max_wait: Max seconds to wait + poll_interval: Seconds between status checks + + Returns: + Updated memory details + + Raises: + ValueError: If memory is not created + ClientError: If AWS API call fails + """ + if not self._memory_id: + raise ValueError("Memory is not launched. Call launch() first.") + + strategy = { + "memoryStrategyType": strategy_type, + "namespace": namespace, + } + if custom_prompt: + strategy["customPrompt"] = custom_prompt + + logger.info("Adding strategy '%s' to memory '%s'...", strategy_type, self._name) + + return self._client.add_strategy_and_wait( + memory_id=self._memory_id, + strategy=strategy, + max_wait=max_wait, + poll_interval=poll_interval, + ) + + def get_session(self, actor_id: str, session_id: str) -> "MemorySession": + """Get a session for conversational operations. + + Args: + actor_id: Actor identifier (e.g., user ID) + session_id: Session identifier (e.g., conversation ID) + + Returns: + MemorySession instance with methods: + - add_turns(messages): Add conversation messages + - get_last_k_turns(k): Get recent conversation history + - process_turn_with_llm(user_input, llm_callback, retrieval_config): Process with LLM + - fork_conversation(messages, root_event_id, branch_name): Create conversation branch + - get_event(event_id): Get a specific event + + Raises: + ValueError: If memory is not launched + + Example: + session = memory.get_session(actor_id="user-123", session_id="conv-456") + + # Add conversation turns + session.add_turns([ + ConversationalMessage("Hello!", MessageRole.USER), + ConversationalMessage("Hi there!", MessageRole.ASSISTANT) + ]) + + # Get recent history + turns = session.get_last_k_turns(k=5) + + # Process with LLM and memory context + memories, response, event = session.process_turn_with_llm( + user_input="What did we discuss?", + llm_callback=my_llm, + retrieval_config={"facts": RetrievalConfig(namespace="facts/{sessionId}/")} + ) + """ + if not self._memory_id: + raise ValueError("Memory is not launched. 
Call launch() first.") + + from .session import MemorySessionManager + + manager = MemorySessionManager(memory_id=self._memory_id, region_name=self._client.region_name) + return manager.create_memory_session(actor_id=actor_id, session_id=session_id) + + def list_events( + self, + actor_id: str, + session_id: str, + branch_name: Optional[str] = None, + max_results: int = 100, + ) -> List[Dict[str, Any]]: + """List events in a session. + + Args: + actor_id: Actor identifier + session_id: Session identifier + branch_name: Optional branch name to filter + max_results: Maximum results to return + + Returns: + List of events + + Raises: + ValueError: If memory is not created + """ + if not self._memory_id: + raise ValueError("Memory is not launched. Call launch() first.") + + params: Dict[str, Any] = { + "memoryId": self._memory_id, + "actorId": actor_id, + "sessionId": session_id, + "maxResults": max_results, + } + + if branch_name: + params["branchName"] = branch_name + + response = self._client.gmdp_client.list_events(**params) + events = response.get("events", []) + return list(events) if events else [] + + def search_records( + self, + query: str, + namespace: str, + top_k: int = 10, + ) -> List[Dict[str, Any]]: + """Search memory records. + + Args: + query: Search query + namespace: Namespace to search in + top_k: Maximum results to return + + Returns: + List of matching memory records + + Raises: + ValueError: If memory is not created + """ + if not self._memory_id: + raise ValueError("Memory is not launched. Call launch() first.") + + return self._client.retrieve_memories( + memory_id=self._memory_id, + namespace=namespace, + query=query, + top_k=top_k, + ) + + # ==================== HELPERS ==================== + + def _refresh_memory_state(self) -> None: + """Fetch current memory state from AWS by name.""" + try: + memories = self._client.list_memories() + + for memory in memories: + # Handle both old and new field names + memory_name = memory.get("name") or memory.get("id", "").split("-")[0] + if memory_name == self._name or memory.get("id", "").startswith(self._name): + self._memory_id = memory.get("memoryId", memory.get("id")) + logger.debug("Found existing memory: %s", self._memory_id) + return + + logger.debug("No existing memory found for '%s'", self._name) + + except ClientError as e: + logger.warning("Failed to refresh memory state: %s", e) + + def _wait_for_deleted(self, max_wait: int, poll_interval: int) -> None: + """Poll until memory is deleted. 
+ + Args: + max_wait: Maximum seconds to wait + poll_interval: Seconds between polls + + Raises: + TimeoutError: If max_wait exceeded + """ + if not self._memory_id: + return + + start_time = time.time() + logger.info("Waiting for memory deletion...") + + while time.time() - start_time < max_wait: + try: + response = self._client.gmcp_client.get_memory(memoryId=self._memory_id) + status = response.get("memory", {}).get("status") + logger.debug("Memory status: %s", status) + + if status == "DELETING": + time.sleep(poll_interval) + continue + + except ClientError as e: + if e.response["Error"]["Code"] == "ResourceNotFoundException": + logger.info("Memory deleted") + return + raise + + time.sleep(poll_interval) + + raise TimeoutError(f"Timeout waiting for memory deletion after {max_wait}s") diff --git a/src/bedrock_agentcore/project.py b/src/bedrock_agentcore/project.py new file mode 100644 index 00000000..f8742732 --- /dev/null +++ b/src/bedrock_agentcore/project.py @@ -0,0 +1,510 @@ +"""Project class for managing Bedrock AgentCore resources. + +This module provides a Project class that loads/saves agentcore.json +configuration and manages collections of Agent and Memory objects. +""" + +import json +import logging +from pathlib import Path +from typing import Any, Dict, List, Optional + +import boto3 + +from .memory import Memory +from .project_config import ( + AgentConfig, + AgentDeployedState, + AWSTarget, + DeployedState, + MemoryProviderConfig, + MemoryRelation, + MemoryStrategyConfig, + MemoryStrategyType, + NetworkMode, + ProjectConfig, + PythonVersion, + RuntimeConfig, + TargetDeployedState, + TargetResources, +) +from .runtime import Agent +from .runtime.build import DirectCodeDeploy + +logger = logging.getLogger(__name__) + + +class Project: + """Manages Bedrock AgentCore resources with agentcore.json configuration. + + Project provides a container for Agent and Memory resources with + JSON-based persistence matching the starter-toolkit format. + + Example: + # Load from agentcore.json + project = Project.from_json("agentcore.json") + + # Access resources + agents = project.agents + memories = project.memories + + # Launch all resources + project.launch_all() + + # Save back to JSON + project.save("agentcore.json") + + Attributes: + name: Project name + agents: List of Agent objects + memories: List of Memory objects + """ + + def __init__( + self, + name: str, + version: Optional[str] = None, + description: Optional[str] = None, + region: Optional[str] = None, + ): + """Create a Project instance. + + Args: + name: Project name (max 23 chars) + version: Project version + description: Project description + region: AWS region (applied to all resources) + """ + self._name = name + self._version = version + self._description = description + self._region = region or boto3.Session().region_name or "us-west-2" + self._agents: Dict[str, Agent] = {} + self._memories: Dict[str, Memory] = {} + + logger.info("Initialized Project '%s' in region %s", name, self._region) + + @classmethod + def from_json(cls, file_path: str, region: Optional[str] = None) -> "Project": + """Load a project from an agentcore.json configuration file. 
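+
+        Memory providers of type ``AgentCoreMemory`` with relation ``own`` are
+        materialized as Memory objects, and each agent's runtime configuration
+        is loaded as a DirectCodeDeploy build strategy.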
+ + Args: + file_path: Path to the agentcore.json file + region: AWS region (overrides config) + + Returns: + Project instance with all resources loaded + + Raises: + FileNotFoundError: If config file doesn't exist + """ + path = Path(file_path) + if not path.exists(): + raise FileNotFoundError(f"Config file not found: {file_path}") + + with open(path, "r") as f: + data = json.load(f) + + config = ProjectConfig.model_validate(data) + project = cls( + name=config.name, + version=config.version, + description=config.description, + region=region, + ) + + # Process agents and their embedded memories + if config.agents: + for agent_config in config.agents: + # Create Memory objects from memoryProviders + if agent_config.memory_providers: + for mem_provider in agent_config.memory_providers: + if mem_provider.type == "AgentCoreMemory" and mem_provider.relation == MemoryRelation.OWN: + # Only create memories that this agent owns + strategies = None + if mem_provider.memory_strategies: + strategies = [ + { + "type": _map_memory_strategy_type(s.type), + "namespace": f"{s.type.value.lower()}/{{sessionId}}/", + } + for s in mem_provider.memory_strategies + ] + + memory = Memory( + name=mem_provider.name, + strategies=strategies, + region=region or project._region, + ) + project.add_memory(memory) + + # Create Agent object + runtime = agent_config.runtime + network_mode = runtime.network_mode.value if runtime.network_mode else "PUBLIC" + + build = DirectCodeDeploy( + source_path=runtime.code_location, + entrypoint=runtime.entrypoint, + ) + + agent = Agent( + name=agent_config.name, + build=build, + network_mode=network_mode, + region=region or project._region, + ) + project.add_agent(agent) + + logger.info("Loaded Project '%s' from %s", config.name, file_path) + return project + + # ==================== PROPERTIES ==================== + + @property + def name(self) -> str: + """Project name.""" + return self._name + + @property + def version(self) -> Optional[str]: + """Project version.""" + return self._version + + @property + def description(self) -> Optional[str]: + """Project description.""" + return self._description + + @property + def region(self) -> str: + """AWS region.""" + return self._region + + @property + def agents(self) -> List[Agent]: + """List of all agents in the project.""" + return list(self._agents.values()) + + @property + def memories(self) -> List[Memory]: + """List of all memories in the project.""" + return list(self._memories.values()) + + # ==================== RESOURCE MANAGEMENT ==================== + + def add_agent(self, agent: Agent) -> "Project": + """Add an agent to the project. + + Args: + agent: Agent object to add + + Returns: + Self for method chaining + """ + self._agents[agent.name] = agent + return self + + def add_memory(self, memory: Memory) -> "Project": + """Add a memory to the project. + + Args: + memory: Memory object to add + + Returns: + Self for method chaining + """ + self._memories[memory.name] = memory + return self + + def get_agent(self, name: str) -> Agent: + """Get an agent by name. + + Args: + name: Agent name + + Returns: + Agent object + + Raises: + KeyError: If agent not found + """ + if name not in self._agents: + raise KeyError(f"Agent not found: {name}") + return self._agents[name] + + def get_memory(self, name: str) -> Memory: + """Get a memory by name. 
+ + Args: + name: Memory name + + Returns: + Memory object + + Raises: + KeyError: If memory not found + """ + if name not in self._memories: + raise KeyError(f"Memory not found: {name}") + return self._memories[name] + + def remove_agent(self, name: str) -> "Project": + """Remove an agent from the project. + + Args: + name: Agent name to remove + + Returns: + Self for method chaining + """ + del self._agents[name] + return self + + def remove_memory(self, name: str) -> "Project": + """Remove a memory from the project. + + Args: + name: Memory name to remove + + Returns: + Self for method chaining + """ + del self._memories[name] + return self + + # ==================== PERSISTENCE ==================== + + def save(self, file_path: str) -> str: + """Save the project configuration to agentcore.json format. + + Args: + file_path: Path to save the JSON config file + + Returns: + The file path where config was saved + """ + # Build agent configs + agent_configs = [] + for agent in self._agents.values(): + # Find associated memories for this agent + memory_providers = [] + for memory in self._memories.values(): + mem_strategies = [] + if memory.config.strategies: + for s in memory.config.strategies: + mem_strategies.append( + MemoryStrategyConfig(type=_reverse_map_memory_strategy_type(s.strategy_type.value)) + ) + + memory_providers.append( + MemoryProviderConfig( + type="AgentCoreMemory", + relation=MemoryRelation.OWN, + name=memory.name, + memory_strategies=mem_strategies if mem_strategies else None, + ) + ) + + # Extract runtime info from agent's build strategy + build = agent.build_strategy + code_location = getattr(build, "source_path", "./src") + entrypoint = getattr(build, "entrypoint", "main.py:handler") + + network_mode = NetworkMode.PUBLIC + if agent.config.network_configuration: + network_mode = NetworkMode(agent.config.network_configuration.network_mode.value) + + runtime = RuntimeConfig( + artifact="CodeZip", + entrypoint=entrypoint, + code_location=code_location, + network_mode=network_mode, + ) + + agent_config = AgentConfig( + name=agent.name, + id=agent.runtime_id, + runtime=runtime, + memory_providers=memory_providers if memory_providers else None, + ) + agent_configs.append(agent_config) + + config = ProjectConfig( + name=self._name, + version=self._version, + description=self._description, + agents=agent_configs if agent_configs else None, + ) + + path = Path(file_path) + with open(path, "w") as f: + json.dump(config.model_dump(mode="json", by_alias=True, exclude_none=True), f, indent=2) + + logger.info("Saved Project config to %s", file_path) + return str(path) + + def save_deployed_state(self, file_path: str, target_name: Optional[str] = None) -> str: + """Save the deployed state to deployed-state.json format. 
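+
+        All launched memory IDs in the project are recorded against each agent
+        in the deployed state.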
+ + Args: + file_path: Path to save the deployed state file + target_name: Target name (defaults to region) + + Returns: + The file path where state was saved + """ + target = target_name or self._region + + # Build agent deployed states + agent_states: Dict[str, AgentDeployedState] = {} + for agent in self._agents.values(): + memory_ids = [m.memory_id for m in self._memories.values() if m.memory_id] + + agent_states[agent.name] = AgentDeployedState( + runtime_id=agent.runtime_id, + runtime_arn=agent.runtime_arn, + memory_ids=memory_ids if memory_ids else None, + ) + + state = DeployedState( + targets={ + target: TargetDeployedState( + resources=TargetResources(agents=agent_states if agent_states else None) + ) + } + ) + + path = Path(file_path) + with open(path, "w") as f: + json.dump(state.model_dump(mode="json", by_alias=True, exclude_none=True), f, indent=2) + + logger.info("Saved deployed state to %s", file_path) + return str(path) + + def save_aws_targets(self, file_path: str, account: Optional[str] = None) -> str: + """Save AWS targets to aws-targets.json format. + + Args: + file_path: Path to save the targets file + account: AWS account ID (auto-detected if not provided) + + Returns: + The file path where targets were saved + """ + if not account: + sts = boto3.client("sts") + account = sts.get_caller_identity()["Account"] + + targets = [ + AWSTarget( + name=self._region.replace("-", ""), + account=account, + region=self._region, + ) + ] + + path = Path(file_path) + with open(path, "w") as f: + json.dump([t.model_dump(mode="json", by_alias=True) for t in targets], f, indent=2) + + logger.info("Saved AWS targets to %s", file_path) + return str(path) + + # ==================== BULK OPERATIONS ==================== + + def launch_all(self, max_wait: int = 600, poll_interval: int = 10) -> Dict[str, Any]: + """Launch all memories and agents. + + Memories are created first, then agents are launched. + + Args: + max_wait: Max seconds to wait for each resource + poll_interval: Seconds between status checks + + Returns: + Dictionary with launch results for each resource + """ + results: Dict[str, Any] = {"memories": {}, "agents": {}} + + # Launch memories first + for memory in self._memories.values(): + logger.info("Launching memory '%s'...", memory.name) + results["memories"][memory.name] = memory.launch( + max_wait=max_wait, + poll_interval=poll_interval, + ) + + # Then launch agents + for agent in self._agents.values(): + logger.info("Launching agent '%s'...", agent.name) + results["agents"][agent.name] = agent.launch( + max_wait=max_wait, + poll_interval=poll_interval, + ) + + return results + + def destroy_all(self, max_wait: int = 300, poll_interval: int = 10) -> Dict[str, Any]: + """Destroy all agents and memories. + + Agents are destroyed first, then memories. 
+ + Args: + max_wait: Max seconds to wait for each resource + poll_interval: Seconds between status checks + + Returns: + Dictionary with destroy results for each resource + """ + results: Dict[str, Any] = {"agents": {}, "memories": {}} + + # Destroy agents first + for agent in self._agents.values(): + logger.info("Destroying agent '%s'...", agent.name) + results["agents"][agent.name] = agent.destroy( + max_wait=max_wait, + poll_interval=poll_interval, + ) + + # Then destroy memories + for memory in self._memories.values(): + logger.info("Destroying memory '%s'...", memory.name) + results["memories"][memory.name] = memory.delete( + max_wait=max_wait, + poll_interval=poll_interval, + ) + + return results + + def status(self) -> Dict[str, Any]: + """Get status of all resources. + + Returns: + Dictionary with status for each agent and memory + """ + return { + "agents": {a.name: {"deployed": a.is_deployed, "runtime_id": a.runtime_id} for a in self._agents.values()}, + "memories": {m.name: {"active": m.is_active, "memory_id": m.memory_id} for m in self._memories.values()}, + } + + +def _map_memory_strategy_type(strategy_type: MemoryStrategyType) -> str: + """Map project config strategy type to Memory strategy type.""" + mapping = { + MemoryStrategyType.SEMANTIC: "SEMANTIC", + MemoryStrategyType.SUMMARIZATION: "SUMMARY", + MemoryStrategyType.USER_PREFERENCE: "USER_PREFERENCE", + MemoryStrategyType.CUSTOM: "CUSTOM_SEMANTIC", + } + return mapping.get(strategy_type, strategy_type.value) + + +def _reverse_map_memory_strategy_type(strategy_type: str) -> MemoryStrategyType: + """Map Memory strategy type back to project config strategy type.""" + mapping = { + "SEMANTIC": MemoryStrategyType.SEMANTIC, + "SUMMARY": MemoryStrategyType.SUMMARIZATION, + "USER_PREFERENCE": MemoryStrategyType.USER_PREFERENCE, + "CUSTOM_SEMANTIC": MemoryStrategyType.CUSTOM, + } + return mapping.get(strategy_type, MemoryStrategyType.SEMANTIC) diff --git a/src/bedrock_agentcore/project_config.py b/src/bedrock_agentcore/project_config.py new file mode 100644 index 00000000..597e93ba --- /dev/null +++ b/src/bedrock_agentcore/project_config.py @@ -0,0 +1,174 @@ +"""Configuration models for Bedrock AgentCore Project. + +This module provides Pydantic models for Project configuration +matching the starter-toolkit agentcore.json schema. 
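+
+A minimal configuration sketch (illustrative values only)::
+
+    config = ProjectConfig.model_validate({
+        "name": "my-project",
+        "agents": [{
+            "name": "my-agent",
+            "runtime": {"entrypoint": "main.py:app", "codeLocation": "./src"},
+        }],
+    })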
+""" + +from enum import Enum +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class PythonVersion(str, Enum): + """Supported Python versions.""" + + PYTHON_3_10 = "PYTHON_3_10" + PYTHON_3_11 = "PYTHON_3_11" + PYTHON_3_12 = "PYTHON_3_12" + PYTHON_3_13 = "PYTHON_3_13" + + +class NetworkMode(str, Enum): + """Network mode options.""" + + PUBLIC = "PUBLIC" + PRIVATE = "PRIVATE" + + +class MemoryRelation(str, Enum): + """Memory provider relation type.""" + + OWN = "own" + USE = "use" + + +class MemoryStrategyType(str, Enum): + """Memory strategy types.""" + + SEMANTIC = "SEMANTIC" + SUMMARIZATION = "SUMMARIZATION" + USER_PREFERENCE = "USER_PREFERENCE" + CUSTOM = "CUSTOM" + + +# ==================== Runtime Config ==================== + + +class InstrumentationConfig(BaseModel): + """Instrumentation configuration.""" + + model_config = ConfigDict(populate_by_name=True) + + enable_otel: bool = Field(default=False, alias="enableOtel") + + +class RuntimeConfig(BaseModel): + """Agent runtime configuration.""" + + model_config = ConfigDict(populate_by_name=True) + + artifact: str = "CodeZip" + name: Optional[str] = None + python_version: PythonVersion = Field(default=PythonVersion.PYTHON_3_12, alias="pythonVersion") + entrypoint: str + code_location: str = Field(alias="codeLocation") + network_mode: NetworkMode = Field(default=NetworkMode.PUBLIC, alias="networkMode") + instrumentation: Optional[InstrumentationConfig] = None + + +# ==================== Memory Provider Config ==================== + + +class MemoryStrategyConfig(BaseModel): + """Memory strategy configuration.""" + + model_config = ConfigDict(populate_by_name=True) + + type: MemoryStrategyType + + +class MemoryProviderConfig(BaseModel): + """Memory provider configuration within an agent.""" + + model_config = ConfigDict(populate_by_name=True) + + type: str = "AgentCoreMemory" + relation: MemoryRelation = MemoryRelation.OWN + name: str + event_expiry_duration: Optional[int] = Field(default=None, alias="eventExpiryDuration") + memory_strategies: Optional[List[MemoryStrategyConfig]] = Field(default=None, alias="memoryStrategies") + + +# ==================== Agent Config ==================== + + +class AgentConfig(BaseModel): + """Agent configuration within a project.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str = Field(max_length=64) + id: Optional[str] = None + runtime: RuntimeConfig + memory_providers: Optional[List[MemoryProviderConfig]] = Field(default=None, alias="memoryProviders") + + +# ==================== Project Config ==================== + + +class ProjectConfig(BaseModel): + """Complete project configuration model. + + This model represents the configuration for a Bedrock AgentCore project, + matching the starter-toolkit agentcore.json schema. 
+ """ + + model_config = ConfigDict(populate_by_name=True) + + name: str = Field(max_length=23) + version: Optional[str] = None + description: Optional[str] = None + agents: Optional[List[AgentConfig]] = None + + +# ==================== AWS Targets Config ==================== + + +class AWSTarget(BaseModel): + """AWS deployment target.""" + + model_config = ConfigDict(populate_by_name=True) + + name: str + account: str + region: str + + +# ==================== Deployed State Config ==================== + + +class AgentDeployedState(BaseModel): + """Deployed state for a single agent.""" + + model_config = ConfigDict(populate_by_name=True) + + runtime_id: Optional[str] = Field(default=None, alias="runtimeId") + runtime_arn: Optional[str] = Field(default=None, alias="runtimeArn") + role_arn: Optional[str] = Field(default=None, alias="roleArn") + session_id: Optional[str] = Field(default=None, alias="sessionId") + memory_ids: Optional[List[str]] = Field(default=None, alias="memoryIds") + + +class TargetResources(BaseModel): + """Resources deployed to a target.""" + + model_config = ConfigDict(populate_by_name=True) + + agents: Optional[Dict[str, AgentDeployedState]] = None + + +class TargetDeployedState(BaseModel): + """Deployed state for a target.""" + + model_config = ConfigDict(populate_by_name=True) + + resources: Optional[TargetResources] = None + + +class DeployedState(BaseModel): + """Complete deployed state model.""" + + model_config = ConfigDict(populate_by_name=True) + + targets: Optional[Dict[str, TargetDeployedState]] = None diff --git a/src/bedrock_agentcore/runtime/__init__.py b/src/bedrock_agentcore/runtime/__init__.py index b86c8aaa..c3b60736 100644 --- a/src/bedrock_agentcore/runtime/__init__.py +++ b/src/bedrock_agentcore/runtime/__init__.py @@ -4,17 +4,26 @@ - BedrockAgentCoreApp: Main application class - RequestContext: HTTP request context - BedrockAgentCoreContext: Agent identity context +- Agent: High-level runtime management with YAML config +- Build strategies: ECR, DirectCodeDeploy """ +from .agent import Agent from .agent_core_runtime_client import AgentCoreRuntimeClient from .app import BedrockAgentCoreApp +from .build import Build, DirectCodeDeploy, ECR from .context import BedrockAgentCoreContext, RequestContext from .models import PingStatus __all__ = [ + "Agent", "AgentCoreRuntimeClient", "BedrockAgentCoreApp", "RequestContext", "BedrockAgentCoreContext", "PingStatus", + # Build strategies + "Build", + "ECR", + "DirectCodeDeploy", ] diff --git a/src/bedrock_agentcore/runtime/agent.py b/src/bedrock_agentcore/runtime/agent.py new file mode 100644 index 00000000..5ad0299a --- /dev/null +++ b/src/bedrock_agentcore/runtime/agent.py @@ -0,0 +1,646 @@ +"""Agent class for managing Bedrock AgentCore Runtimes. + +This module provides a high-level Agent class that wraps runtime operations +with Build strategy support for container and code deployment. 
+""" + +import json +import logging +import time +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union + +import boto3 +from botocore.config import Config +from botocore.exceptions import ClientError + +from bedrock_agentcore._utils.user_agent import build_user_agent_suffix + +from .config import ( + BuildConfigModel, + BuildStrategyType, + NetworkConfigurationModel, + NetworkMode, + RuntimeArtifactModel, + RuntimeConfigModel, + VpcConfigModel, +) + +if TYPE_CHECKING: + from .build import Build + +logger = logging.getLogger(__name__) + + +class Agent: + """Represents a Bedrock AgentCore Runtime with Build strategy support. + + Each Agent instance manages a single runtime. Use Project.from_json() + to load agents from configuration files. + + Example: + from bedrock_agentcore.runtime import Agent + from bedrock_agentcore.runtime.build import ECR, DirectCodeDeploy + + # Pre-built ECR image + agent = Agent( + name="my-agent", + build=ECR(image_uri="123456789.dkr.ecr.us-west-2.amazonaws.com/my-agent:latest"), + ) + agent.launch() + + # Build from source with CodeBuild + ECR + agent = Agent( + name="my-agent", + build=ECR(source_path="./agent-src", entrypoint="main.py:app"), + ) + agent.launch() # Builds and launches + + # Direct code deploy (zip to S3) + agent = Agent( + name="my-agent", + build=DirectCodeDeploy(source_path="./agent-src", entrypoint="main.py:app"), + ) + agent.launch() + + Attributes: + name: Agent name + config: Runtime configuration model + runtime_arn: ARN of deployed runtime (if deployed) + runtime_id: ID of deployed runtime (if deployed) + is_deployed: Whether the agent is deployed + """ + + def __init__( + self, + name: str, + build: "Build", + description: Optional[str] = None, + network_mode: str = "PUBLIC", + security_groups: Optional[List[str]] = None, + subnets: Optional[List[str]] = None, + environment_variables: Optional[Dict[str, str]] = None, + tags: Optional[Dict[str, str]] = None, + region: Optional[str] = None, + ): + """Create an Agent instance with a build strategy. 
+ + Args: + name: Unique agent name (used for runtime name) + build: Build strategy (PrebuiltImage, CodeBuild, LocalBuild, or DirectCodeDeploy) + description: Optional description of the agent + network_mode: "PUBLIC" or "VPC" + security_groups: Security group IDs (required if network_mode="VPC") + subnets: Subnet IDs (required if network_mode="VPC") + environment_variables: Environment variables for the container + tags: Resource tags + region: AWS region (defaults to boto3 default or us-west-2) + """ + self._name = name + self._region = region or boto3.Session().region_name or "us-west-2" + self._runtime_id: Optional[str] = None + self._runtime_arn: Optional[str] = None + self._build_strategy: "Build" = build + + # Build config model + vpc_config = None + if network_mode == "VPC" and security_groups and subnets: + vpc_config = VpcConfigModel(securityGroups=security_groups, subnets=subnets) + + network_config = NetworkConfigurationModel( + networkMode=NetworkMode(network_mode), + vpcConfig=vpc_config, + ) + + # Build artifact config from build strategy if image_uri available + artifact = None + if build.image_uri: + artifact = RuntimeArtifactModel(imageUri=build.image_uri) + + # Build the build config for serialization + build_config = self._create_build_config(build) + + self._config = RuntimeConfigModel( + name=name, + description=description, + artifact=artifact, + build=build_config, + networkConfiguration=network_config, + environmentVariables=environment_variables, + tags=tags, + ) + + # Initialize boto3 clients + user_agent_extra = build_user_agent_suffix() + client_config = Config(user_agent_extra=user_agent_extra) + + self._control_plane = boto3.client( + "bedrock-agentcore-control", + region_name=self._region, + config=client_config, + ) + self._data_plane = boto3.client( + "bedrock-agentcore", + region_name=self._region, + config=client_config, + ) + + logger.info("Initialized Agent '%s' in region %s", name, self._region) + + # ==================== PROPERTIES ==================== + + @property + def name(self) -> str: + """Agent name.""" + return self._name + + @property + def config(self) -> RuntimeConfigModel: + """Current configuration.""" + return self._config + + @property + def runtime_arn(self) -> Optional[str]: + """Runtime ARN if deployed.""" + return self._runtime_arn + + @property + def runtime_id(self) -> Optional[str]: + """Runtime ID if deployed.""" + return self._runtime_id + + @property + def is_deployed(self) -> bool: + """Whether agent is deployed (has runtime ARN).""" + return self._runtime_arn is not None + + @property + def image_uri(self) -> Optional[str]: + """Current image URI from the build strategy.""" + return self._build_strategy.image_uri + + @property + def build_strategy(self) -> "Build": + """Build strategy for this agent.""" + return self._build_strategy + + # ==================== OPERATIONS ==================== + + def build_and_launch( + self, + tag: str = "latest", + max_wait_build: int = 600, + max_wait_launch: int = 600, + poll_interval: int = 10, + ) -> Dict[str, Any]: + """Build, push, and launch the agent in one step. + + This is the primary method for deploying an agent. It handles: + 1. Building and pushing the artifact (via build strategy's launch()) + 2. 
Creating or updating the runtime in AWS (via launch()) + + For ECR strategy: builds container and pushes to ECR, then launches runtime + For DirectCodeDeploy: packages code and uploads to S3, then launches runtime + For pre-built images: skips build, just launches runtime + + This method is idempotent - it will create the runtime if it doesn't exist, + or update it if it does. + + Args: + tag: Image tag for build (default: "latest") + max_wait_build: Maximum seconds to wait for build + max_wait_launch: Maximum seconds to wait for launch + poll_interval: Seconds between status checks + + Returns: + Launch result including runtime ARN and status + + Raises: + RuntimeError: If build or launch fails + """ + # Launch artifact (build + push) if image not yet available + if not self._build_strategy.image_uri: + logger.info("Launching build artifact...") + self._build_strategy.validate_prerequisites() + + result = self._build_strategy.launch( + agent_name=self._name, + region_name=self._region, + tag=tag, + max_wait=max_wait_build, + ) + + # Update the config artifact with the built image + if result.get("imageUri"): + self._config.artifact = RuntimeArtifactModel(imageUri=result["imageUri"]) + logger.info("Artifact ready. Image URI: %s", result["imageUri"]) + + # Launch the agent runtime (create or update) + return self.launch(max_wait=max_wait_launch, poll_interval=poll_interval) + + def launch( + self, + max_wait: int = 600, + poll_interval: int = 10, + ) -> Dict[str, Any]: + """Deploy the agent to AWS (create or update). + + This method is idempotent - it will create the runtime if it doesn't exist, + or update it if it already exists. + + Waits for the runtime to become ACTIVE before returning. + + Args: + max_wait: Max seconds to wait + poll_interval: Seconds between status checks + + Returns: + Runtime details dict + + Raises: + ClientError: If AWS API call fails + TimeoutError: If wait times out + """ + # Get image URI (either provided or built) + current_image_uri = self.image_uri + if not current_image_uri: + raise ValueError( + "Cannot launch agent without image_uri. " + "Either provide image_uri or call build_and_launch() for source-based agents." 
+ ) + + # Check if runtime already exists + self._refresh_runtime_state() + + if self._runtime_id: + # Runtime exists - update it + return self._update_runtime(current_image_uri, max_wait, poll_interval) + else: + # Runtime doesn't exist - create it + return self._create_runtime(current_image_uri, max_wait, poll_interval) + + def _create_runtime( + self, + image_uri: str, + max_wait: int, + poll_interval: int, + ) -> Dict[str, Any]: + """Create a new agent runtime.""" + params: Dict[str, Any] = { + "agentRuntimeName": self._name, + "agentRuntimeArtifact": { + "containerConfiguration": { + "containerUri": image_uri, + }, + }, + } + + if self._config.description: + params["description"] = self._config.description + + if self._config.network_configuration: + network_config: Dict[str, Any] = { + "networkMode": self._config.network_configuration.network_mode.value, + } + if self._config.network_configuration.vpc_config: + network_config["vpcConfiguration"] = { + "securityGroupIds": self._config.network_configuration.vpc_config.security_groups, + "subnetIds": self._config.network_configuration.vpc_config.subnets, + } + params["networkConfiguration"] = network_config + + if self._config.environment_variables: + params["environmentVariables"] = self._config.environment_variables + + logger.info("Creating agent runtime '%s'...", self._name) + + try: + response = self._control_plane.create_agent_runtime(**params) + self._runtime_arn = response.get("agentRuntimeArn") + self._runtime_id = response.get("agentRuntimeId") + + logger.info("Created runtime with ARN: %s", self._runtime_arn) + + return self._wait_for_active(max_wait, poll_interval) + + except ClientError as e: + logger.error("Failed to create agent runtime: %s", e) + raise + + def _update_runtime( + self, + image_uri: str, + max_wait: int, + poll_interval: int, + ) -> Dict[str, Any]: + """Update an existing agent runtime.""" + params: Dict[str, Any] = { + "agentRuntimeId": self._runtime_id, + "agentRuntimeArtifact": { + "containerConfiguration": { + "containerUri": image_uri, + }, + }, + } + + if self._config.description: + params["description"] = self._config.description + + if self._config.network_configuration: + network_config: Dict[str, Any] = { + "networkMode": self._config.network_configuration.network_mode.value, + } + if self._config.network_configuration.vpc_config: + network_config["vpcConfiguration"] = { + "securityGroupIds": self._config.network_configuration.vpc_config.security_groups, + "subnetIds": self._config.network_configuration.vpc_config.subnets, + } + params["networkConfiguration"] = network_config + + if self._config.environment_variables: + params["environmentVariables"] = self._config.environment_variables + + logger.info("Updating agent runtime '%s'...", self._name) + + try: + response = self._control_plane.update_agent_runtime(**params) + self._runtime_arn = response.get("agentRuntimeArn") + + logger.info("Updated runtime with ARN: %s", self._runtime_arn) + + return self._wait_for_active(max_wait, poll_interval) + + except ClientError as e: + logger.error("Failed to update agent runtime: %s", e) + raise + + def invoke( + self, + payload: Union[Dict[str, Any], str, bytes], + session_id: Optional[str] = None, + endpoint_name: str = "DEFAULT", + ) -> Dict[str, Any]: + """Invoke the agent with a payload. 
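+
+        Dict payloads are JSON-encoded before sending; the response payload is
+        JSON-decoded when possible and otherwise returned as raw bytes.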
+ + Args: + payload: Request payload (dict will be JSON-encoded) + session_id: Session ID for stateful interactions + endpoint_name: Endpoint qualifier + + Returns: + Response dict with payload and metadata + + Raises: + ValueError: If agent is not deployed + ClientError: If AWS API call fails + """ + if not self._runtime_arn: + raise ValueError("Agent is not deployed. Call launch() first.") + + # Encode payload + if isinstance(payload, dict): + payload_bytes = json.dumps(payload).encode("utf-8") + elif isinstance(payload, str): + payload_bytes = payload.encode("utf-8") + else: + payload_bytes = payload + + params: Dict[str, Any] = { + "agentRuntimeArn": self._runtime_arn, + "payload": payload_bytes, + "qualifier": endpoint_name, + } + + if session_id: + params["sessionId"] = session_id + + logger.debug("Invoking agent with payload...") + + response = self._data_plane.invoke_agent_runtime(**params) + + # Parse response payload + response_payload = response.get("payload", b"") + if isinstance(response_payload, bytes): + try: + response_payload = json.loads(response_payload.decode("utf-8")) + except (json.JSONDecodeError, UnicodeDecodeError): + pass + + return { + "payload": response_payload, + "sessionId": response.get("sessionId"), + "contentType": response.get("contentType"), + } + + def stop_session(self, session_id: str) -> Dict[str, Any]: + """Stop a specific runtime session. + + Args: + session_id: Session to stop + + Returns: + Stop operation result + + Raises: + ValueError: If agent is not deployed + ClientError: If AWS API call fails + """ + if not self._runtime_arn: + raise ValueError("Agent is not deployed. Call launch() first.") + + logger.info("Stopping session '%s'...", session_id) + + response = self._data_plane.stop_agent_runtime_session( + agentRuntimeArn=self._runtime_arn, + sessionId=session_id, + ) + + return dict(response) + + def destroy( + self, + max_wait: int = 300, + poll_interval: int = 10, + ) -> Dict[str, Any]: + """Delete the runtime from AWS. + + Waits for deletion to complete before returning. + + Args: + max_wait: Max seconds to wait + poll_interval: Seconds between status checks + + Returns: + Deletion result + + Raises: + ValueError: If agent is not deployed + ClientError: If AWS API call fails + """ + if not self._runtime_id: + logger.warning("Agent '%s' is not deployed, nothing to destroy", self._name) + return {"status": "NOT_DEPLOYED"} + + logger.info("Destroying agent '%s'...", self._name) + + try: + response = self._control_plane.delete_agent_runtime( + agentRuntimeId=self._runtime_id, + ) + + self._wait_for_deleted(max_wait, poll_interval) + + # Clear state + self._runtime_arn = None + self._runtime_id = None + + logger.info("Agent '%s' destroyed", self._name) + return dict(response) + + except ClientError as e: + if e.response["Error"]["Code"] == "ResourceNotFoundException": + logger.warning("Agent '%s' not found, may already be deleted", self._name) + self._runtime_arn = None + self._runtime_id = None + return {"status": "NOT_FOUND"} + raise + + # ==================== HELPERS ==================== + + def _create_build_config(self, build: "Build") -> BuildConfigModel: + """Create a BuildConfigModel from a Build strategy for serialization. 
+ + Args: + build: Build strategy instance + + Returns: + BuildConfigModel for YAML serialization + """ + from .build import DirectCodeDeploy, ECR + + if isinstance(build, ECR): + return BuildConfigModel( + strategy=BuildStrategyType.ECR, + imageUri=build.image_uri, + sourcePath=build.source_path, + entrypoint=build.entrypoint, + ) + elif isinstance(build, DirectCodeDeploy): + return BuildConfigModel( + strategy=BuildStrategyType.DIRECT_CODE_DEPLOY, + sourcePath=build.source_path, + entrypoint=build.entrypoint, + s3Bucket=build._s3_bucket, + ) + else: + # Unknown strategy - try to serialize with minimal info + return BuildConfigModel( + strategy=BuildStrategyType.ECR, + imageUri=build.image_uri, + ) + + def _refresh_runtime_state(self) -> None: + """Fetch current runtime state from AWS by name.""" + try: + paginator = self._control_plane.get_paginator("list_agent_runtimes") + + for page in paginator.paginate(): + for runtime in page.get("agentRuntimeSummaries", []): + if runtime.get("agentRuntimeName") == self._name: + self._runtime_id = runtime.get("agentRuntimeId") + self._runtime_arn = runtime.get("agentRuntimeArn") + logger.debug( + "Found existing runtime: %s (ARN: %s)", + self._runtime_id, + self._runtime_arn, + ) + return + + logger.debug("No existing runtime found for agent '%s'", self._name) + + except ClientError as e: + logger.warning("Failed to refresh runtime state: %s", e) + + def _wait_for_active(self, max_wait: int, poll_interval: int) -> Dict[str, Any]: + """Poll until runtime is ACTIVE. + + Args: + max_wait: Maximum seconds to wait + poll_interval: Seconds between polls + + Returns: + Final runtime details + + Raises: + TimeoutError: If max_wait exceeded + RuntimeError: If runtime enters FAILED state + """ + if not self._runtime_id: + raise ValueError("No runtime ID to wait for") + + start_time = time.time() + logger.info("Waiting for runtime to become ACTIVE...") + + while time.time() - start_time < max_wait: + try: + response = self._control_plane.get_agent_runtime( + agentRuntimeId=self._runtime_id, + ) + + status = response.get("status") + logger.debug("Runtime status: %s", status) + + if status == "ACTIVE": + logger.info("Runtime is ACTIVE") + return dict(response) + + if status == "FAILED": + raise RuntimeError( + f"Runtime failed to launch: {response.get('failureReason', 'Unknown')}" + ) + + time.sleep(poll_interval) + + except ClientError as e: + logger.warning("Error checking runtime status: %s", e) + time.sleep(poll_interval) + + raise TimeoutError(f"Timeout waiting for runtime to become ACTIVE after {max_wait}s") + + def _wait_for_deleted(self, max_wait: int, poll_interval: int) -> None: + """Poll until runtime is deleted. 
+ + Args: + max_wait: Maximum seconds to wait + poll_interval: Seconds between polls + + Raises: + TimeoutError: If max_wait exceeded + """ + if not self._runtime_id: + return + + start_time = time.time() + logger.info("Waiting for runtime deletion...") + + while time.time() - start_time < max_wait: + try: + response = self._control_plane.get_agent_runtime( + agentRuntimeId=self._runtime_id, + ) + + status = response.get("status") + logger.debug("Runtime status: %s", status) + + if status == "DELETING": + time.sleep(poll_interval) + continue + + except ClientError as e: + if e.response["Error"]["Code"] == "ResourceNotFoundException": + logger.info("Runtime deleted") + return + raise + + time.sleep(poll_interval) + + raise TimeoutError(f"Timeout waiting for runtime deletion after {max_wait}s") diff --git a/src/bedrock_agentcore/runtime/build.py b/src/bedrock_agentcore/runtime/build.py new file mode 100644 index 00000000..dbc7e29d --- /dev/null +++ b/src/bedrock_agentcore/runtime/build.py @@ -0,0 +1,381 @@ +"""Build strategies for Bedrock AgentCore agent deployments. + +This module provides an abstract Build class and concrete implementations +for different build/deployment strategies: + +- ECR: Deploy container images to ECR (via CodeBuild or pre-built image) +- DirectCodeDeploy: Package Python code as zip for direct deployment to S3 + +Example: + from bedrock_agentcore.runtime import Agent + from bedrock_agentcore.runtime.build import ECR, DirectCodeDeploy + + # Build from source with CodeBuild and push to ECR + agent = Agent( + name="my-agent", + build=ECR(source_path="./agent-src", entrypoint="main.py:app"), + ) + + # Use pre-built docker image + agent = Agent( + name="my-agent", + build=ECR(image_uri="123456789.dkr.ecr.us-west-2.amazonaws.com/my-agent:latest"), + ) + + # Direct code deploy (no container) + agent = Agent( + name="my-agent", + build=DirectCodeDeploy(source_path="./agent-src", entrypoint="main.py:app"), + ) +""" + +import logging +import os +import shutil +import tempfile +import zipfile +from abc import ABC, abstractmethod +from typing import Any, Dict, Optional + +logger = logging.getLogger(__name__) + + +class Build(ABC): + """Abstract base class for build strategies. + + Subclasses implement different methods for building and packaging + agent code for deployment to Bedrock AgentCore. + """ + + @abstractmethod + def launch( + self, + agent_name: str, + region_name: Optional[str] = None, + tag: str = "latest", + max_wait: int = 600, + ) -> Dict[str, Any]: + """Build and push the agent code to target repository. + + This builds the code (if needed) and pushes to the target + (ECR for container strategies, S3 for direct code deploy). + + Args: + agent_name: Name of the agent + region_name: AWS region name + tag: Image/version tag + max_wait: Maximum seconds to wait for launch + + Returns: + Dictionary with launch results including: + - imageUri or packageUri depending on strategy + - status: Launch status + """ + pass + + @property + @abstractmethod + def image_uri(self) -> Optional[str]: + """Return the image URI if available (after launch or for pre-built).""" + pass + + def validate_prerequisites(self) -> None: # noqa: B027 + """Validate that prerequisites for this build strategy are met. + + This is a hook that subclasses can optionally override. The default + implementation does nothing (no prerequisites required). + + Raises: + RuntimeError: If prerequisites are not met + """ + pass + + +class ECR(Build): + """Deploy container images to ECR. 
+ + This strategy supports two modes: + 1. Build from source using AWS CodeBuild (provide source_path and entrypoint) + 2. Use a pre-built docker image (provide image_uri) + + Example: + # Build from source with CodeBuild + build = ECR(source_path="./my-agent", entrypoint="agent.py:app") + + # Use pre-built image + build = ECR(image_uri="123456789.dkr.ecr.us-west-2.amazonaws.com/my-agent:latest") + """ + + def __init__( + self, + source_path: Optional[str] = None, + entrypoint: Optional[str] = None, + image_uri: Optional[str] = None, + ): + """Initialize ECR build strategy. + + Provide either (source_path + entrypoint) for CodeBuild, or image_uri for pre-built. + + Args: + source_path: Path to agent source code (for CodeBuild) + entrypoint: Entry point e.g. "main.py:app" (for CodeBuild) + image_uri: Pre-built ECR image URI + + Raises: + ValueError: If neither source_path nor image_uri is provided + """ + if image_uri: + self._mode = "prebuilt" + self._image_uri = image_uri + self._source_path = None + self._entrypoint = None + elif source_path and entrypoint: + self._mode = "codebuild" + self._image_uri: Optional[str] = None + self._source_path = source_path + self._entrypoint = entrypoint + else: + raise ValueError( + "Must provide either image_uri (pre-built) or both source_path and entrypoint (CodeBuild)" + ) + + @property + def mode(self) -> str: + """Return the build mode ('prebuilt' or 'codebuild').""" + return self._mode + + @property + def source_path(self) -> Optional[str]: + """Return the source path (None for pre-built).""" + return self._source_path + + @property + def entrypoint(self) -> Optional[str]: + """Return the entrypoint (None for pre-built).""" + return self._entrypoint + + @property + def image_uri(self) -> Optional[str]: + """Return the image URI.""" + return self._image_uri + + def launch( + self, + agent_name: str, + region_name: Optional[str] = None, + tag: str = "latest", + max_wait: int = 600, + ) -> Dict[str, Any]: + """Build and push the container image to ECR. + + For pre-built images, this returns the provided image URI. + For CodeBuild mode, this always builds and pushes to ECR. + + Args: + agent_name: Name of the agent + region_name: AWS region name + tag: Image tag + max_wait: Maximum seconds to wait for launch + + Returns: + Dictionary with: + - imageUri: ECR image URI + - status: "SUCCEEDED" or "READY" + """ + if self._mode == "prebuilt": + logger.info("Using pre-built image: %s", self._image_uri) + return { + "imageUri": self._image_uri, + "status": "READY", + } + + # CodeBuild mode - always build and push + logger.info("Building and pushing image with CodeBuild...") + from .builder import build_and_push + + result = build_and_push( + source_path=self._source_path, + agent_name=agent_name, + entrypoint=self._entrypoint, + region_name=region_name, + tag=tag, + wait=True, + max_wait=max_wait, + ) + + self._image_uri = result.get("imageUri") + logger.info("Launch complete. Image URI: %s", self._image_uri) + return result + + +class DirectCodeDeploy(Build): + """Package Python code as zip for direct deployment to S3. + + This strategy packages Python code as a zip file and uploads to S3 + for direct deployment to Bedrock AgentCore. No container build required. + + Example: + build = DirectCodeDeploy( + source_path="./my-agent", + entrypoint="agent.py:app", + ) + """ + + def __init__( + self, + source_path: str, + entrypoint: str, + s3_bucket: Optional[str] = None, + auto_create_bucket: bool = True, + ): + """Initialize direct code deploy strategy. 
+ + Args: + source_path: Path to agent source code + entrypoint: Entry point (e.g., "main.py:app") + s3_bucket: S3 bucket for code packages. If None, auto-generates. + auto_create_bucket: Create bucket if it doesn't exist + """ + self._source_path = source_path + self._entrypoint = entrypoint + self._s3_bucket = s3_bucket + self._auto_create_bucket = auto_create_bucket + self._package_uri: Optional[str] = None + + @property + def source_path(self) -> str: + """Return the source path.""" + return self._source_path + + @property + def entrypoint(self) -> str: + """Return the entrypoint.""" + return self._entrypoint + + @property + def image_uri(self) -> Optional[str]: + """Return the image URI (always None for direct code deploy).""" + return None + + @property + def package_uri(self) -> Optional[str]: + """Return the S3 package URI after deploy.""" + return self._package_uri + + def validate_prerequisites(self) -> None: + """Validate that zip utility is available.""" + if not shutil.which("zip"): + raise RuntimeError("zip utility not found. Install zip to use direct code deploy.") + + def launch( + self, + agent_name: str, + region_name: Optional[str] = None, + tag: str = "latest", + max_wait: int = 600, + ) -> Dict[str, Any]: + """Package Python code and upload to S3. + + This always packages and uploads the code to ensure the latest + source changes are deployed. + + Args: + agent_name: Name of the agent + region_name: AWS region name + tag: Version tag for the package + max_wait: Maximum seconds to wait (unused) + + Returns: + Dictionary with: + - packageUri: S3 URI of the code package + - s3Bucket: Bucket name + - s3Key: Object key + - status: "SUCCEEDED" + """ + import boto3 + from botocore.exceptions import ClientError + + source_path = os.path.abspath(self._source_path) + if not os.path.exists(source_path): + raise FileNotFoundError(f"Source path not found: {source_path}") + + region = region_name or boto3.Session().region_name or "us-west-2" + sts_client = boto3.client("sts") + account_id = sts_client.get_caller_identity()["Account"] + + # Determine bucket name + bucket_name = self._s3_bucket + if not bucket_name: + bucket_name = f"bedrock-agentcore-code-{account_id}-{region}" + + # Ensure bucket exists + s3_client = boto3.client("s3", region_name=region) + if self._auto_create_bucket: + try: + s3_client.head_bucket(Bucket=bucket_name) + except ClientError as e: + if e.response["Error"]["Code"] == "404": + logger.info("Creating S3 bucket '%s'...", bucket_name) + if region == "us-east-1": + s3_client.create_bucket(Bucket=bucket_name) + else: + s3_client.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": region}, + ) + else: + raise + + # Create zip package + with tempfile.TemporaryDirectory() as temp_dir: + zip_path = os.path.join(temp_dir, "code.zip") + self._create_code_package(source_path, zip_path) + + # Upload to S3 + s3_key = f"{agent_name}/{tag}/code.zip" + logger.info("Uploading code package to s3://%s/%s", bucket_name, s3_key) + s3_client.upload_file(zip_path, bucket_name, s3_key) + + self._package_uri = f"s3://{bucket_name}/{s3_key}" + logger.info("Launch complete. 
Package URI: %s", self._package_uri) + + return { + "packageUri": self._package_uri, + "s3Bucket": bucket_name, + "s3Key": s3_key, + "status": "SUCCEEDED", + "entrypoint": self._entrypoint, + } + + def _create_code_package(self, source_path: str, output_path: str) -> str: + """Create a zip package of the source code.""" + exclude_dirs = { + ".git", "__pycache__", ".venv", "venv", "node_modules", + ".pytest_cache", ".mypy_cache", ".ruff_cache", "dist", "build", + "*.egg-info", + } + + exclude_patterns = { + "*.pyc", "*.pyo", "*.pyd", ".DS_Store", "*.so", + ".env", ".env.*", "*.log", + } + + with zipfile.ZipFile(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: + for root, dirs, files in os.walk(source_path): + dirs[:] = [d for d in dirs if d not in exclude_dirs] + + for file in files: + if any(self._matches_pattern(file, p) for p in exclude_patterns): + continue + + file_path = os.path.join(root, file) + arcname = os.path.relpath(file_path, source_path) + zipf.write(file_path, arcname) + + logger.debug("Created code package: %s", output_path) + return output_path + + def _matches_pattern(self, filename: str, pattern: str) -> bool: + """Check if filename matches a glob pattern.""" + import fnmatch + return fnmatch.fnmatch(filename, pattern) diff --git a/src/bedrock_agentcore/runtime/builder.py b/src/bedrock_agentcore/runtime/builder.py new file mode 100644 index 00000000..9688044a --- /dev/null +++ b/src/bedrock_agentcore/runtime/builder.py @@ -0,0 +1,475 @@ +"""Docker build operations for Bedrock AgentCore Runtime. + +This module provides functions for building Docker images +for container-based agent deployments using AWS CodeBuild. +""" + +import json +import logging +import os +import tempfile +import time +import uuid +import zipfile +from typing import Any, Dict, Optional + +import boto3 +from botocore.config import Config +from botocore.exceptions import ClientError + +from bedrock_agentcore._utils.user_agent import build_user_agent_suffix + +from .ecr import ensure_ecr_repository +from .iam import get_or_create_codebuild_execution_role + +logger = logging.getLogger(__name__) + +# Default Dockerfile template for Python agents +DEFAULT_DOCKERFILE_TEMPLATE = '''FROM python:3.12-slim + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y --no-install-recommends \\ + gcc \\ + && rm -rf /var/lib/apt/lists/* + +# Copy requirements first for better caching +COPY requirements.txt* pyproject.toml* ./ + +# Install Python dependencies +RUN if [ -f "requirements.txt" ]; then pip install --no-cache-dir -r requirements.txt; fi +RUN if [ -f "pyproject.toml" ]; then pip install --no-cache-dir .; fi + +# Copy application code +COPY . . + +# Install bedrock-agentcore SDK +RUN pip install --no-cache-dir bedrock-agentcore + +# Expose port +EXPOSE 8080 + +# Set entrypoint +CMD ["python", "-m", "{entrypoint_module}"] +''' + + +def get_codebuild_client(region_name: Optional[str] = None) -> Any: + """Get a CodeBuild client with proper user agent. + + Args: + region_name: AWS region name + + Returns: + boto3 CodeBuild client + """ + user_agent_extra = build_user_agent_suffix() + client_config = Config(user_agent_extra=user_agent_extra) + return boto3.client("codebuild", region_name=region_name, config=client_config) + + +def get_s3_client(region_name: Optional[str] = None) -> Any: + """Get an S3 client with proper user agent. 
+ + Args: + region_name: AWS region name + + Returns: + boto3 S3 client + """ + user_agent_extra = build_user_agent_suffix() + client_config = Config(user_agent_extra=user_agent_extra) + return boto3.client("s3", region_name=region_name, config=client_config) + + +def generate_dockerfile( + source_path: str, + entrypoint: str, + output_path: Optional[str] = None, +) -> str: + """Generate a Dockerfile for the agent. + + Args: + source_path: Path to agent source code + entrypoint: Entry point (e.g., "agent.py:app" or "agent") + output_path: Optional path to write Dockerfile (default: source_path/Dockerfile) + + Returns: + Path to generated Dockerfile + """ + # Parse entrypoint to get module name + if ":" in entrypoint: + module_part = entrypoint.split(":")[0] + else: + module_part = entrypoint + + # Remove .py extension if present + if module_part.endswith(".py"): + module_part = module_part[:-3] + + # Generate Dockerfile content + dockerfile_content = DEFAULT_DOCKERFILE_TEMPLATE.format(entrypoint_module=module_part) + + # Determine output path + if output_path is None: + output_path = os.path.join(source_path, "Dockerfile") + + # Write Dockerfile + with open(output_path, "w") as f: + f.write(dockerfile_content) + + logger.info("Generated Dockerfile at: %s", output_path) + return output_path + + +def _create_source_zip(source_path: str, output_path: str) -> str: + """Create a zip file of the source code for CodeBuild. + + Args: + source_path: Path to agent source code + output_path: Path to write zip file + + Returns: + Path to created zip file + """ + source_path = os.path.abspath(source_path) + + with zipfile.ZipFile(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: + for root, dirs, files in os.walk(source_path): + # Skip common directories + dirs[:] = [d for d in dirs if d not in [".git", "__pycache__", ".venv", "venv", "node_modules"]] + + for file in files: + file_path = os.path.join(root, file) + arcname = os.path.relpath(file_path, source_path) + zipf.write(file_path, arcname) + + logger.debug("Created source zip: %s", output_path) + return output_path + + +def _ensure_source_bucket( + agent_name: str, + region_name: Optional[str] = None, +) -> str: + """Ensure an S3 bucket exists for CodeBuild source code. + + Args: + agent_name: Name of the agent + region_name: AWS region name + + Returns: + Bucket name + """ + s3_client = get_s3_client(region_name) + sts_client = boto3.client("sts") + + account_id = sts_client.get_caller_identity()["Account"] + region = region_name or boto3.Session().region_name or "us-west-2" + + bucket_name = f"bedrock-agentcore-codebuild-{account_id}-{region}" + + try: + s3_client.head_bucket(Bucket=bucket_name) + logger.debug("S3 bucket '%s' already exists", bucket_name) + except ClientError as e: + if e.response["Error"]["Code"] == "404": + logger.info("Creating S3 bucket '%s'...", bucket_name) + if region == "us-east-1": + s3_client.create_bucket(Bucket=bucket_name) + else: + s3_client.create_bucket( + Bucket=bucket_name, + CreateBucketConfiguration={"LocationConstraint": region}, + ) + else: + raise + + return bucket_name + + +def build_image_codebuild( + source_path: str, + image_name: str, + ecr_repository_uri: str, + tag: str = "latest", + region_name: Optional[str] = None, + wait: bool = True, + max_wait: int = 600, + poll_interval: int = 10, +) -> Dict[str, Any]: + """Build a Docker image using AWS CodeBuild. + + This method uploads source code to S3, creates a CodeBuild project, + and runs the build to produce an ARM64 image in ECR. 
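+
+    Example (illustrative sketch; the repository URI and names below are
+    placeholders, and AWS credentials with CodeBuild, S3, and ECR access
+    are assumed):
+        result = build_image_codebuild(
+            source_path="./my-agent",
+            image_name="my-agent",
+            ecr_repository_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/my-agent",
+            tag="latest",
+        )
+        print(result["imageUri"], result["status"])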
+
+    Args:
+        source_path: Path to agent source code
+        image_name: Name for the Docker image
+        ecr_repository_uri: ECR repository URI
+        tag: Image tag
+        region_name: AWS region name
+        wait: Wait for build to complete
+        max_wait: Maximum seconds to wait
+        poll_interval: Seconds between status checks
+
+    Returns:
+        Dictionary with build details
+
+    Raises:
+        RuntimeError: If build fails
+        TimeoutError: If wait times out
+    """
+    codebuild_client = get_codebuild_client(region_name)
+    s3_client = get_s3_client(region_name)
+    sts_client = boto3.client("sts")
+
+    account_id = sts_client.get_caller_identity()["Account"]
+    region = region_name or boto3.Session().region_name or "us-west-2"
+
+    source_path = os.path.abspath(source_path)
+
+    # Verify Dockerfile exists (should be generated before calling this function)
+    dockerfile = os.path.join(source_path, "Dockerfile")
+    if not os.path.exists(dockerfile):
+        raise FileNotFoundError(f"Dockerfile not found at: {dockerfile}")
+
+    # Ensure S3 bucket for source code
+    bucket_name = _ensure_source_bucket(image_name, region_name)
+
+    # Get ECR repository ARN
+    ecr_repo_name = ecr_repository_uri.split("/")[-1].split(":")[0]
+    ecr_repository_arn = f"arn:aws:ecr:{region}:{account_id}:repository/{ecr_repo_name}"
+
+    # Ensure CodeBuild IAM role
+    role_result = get_or_create_codebuild_execution_role(
+        agent_name=image_name,
+        ecr_repository_arn=ecr_repository_arn,
+        region_name=region_name,
+        source_bucket_name=bucket_name,
+    )
+    codebuild_role_arn = role_result["roleArn"]
+
+    # Create and upload source zip
+    with tempfile.TemporaryDirectory() as temp_dir:
+        zip_path = os.path.join(temp_dir, "source.zip")
+        _create_source_zip(source_path, zip_path)
+
+        # Upload to S3
+        s3_key = f"{image_name}/{uuid.uuid4().hex}/source.zip"
+        logger.info("Uploading source code to s3://%s/%s", bucket_name, s3_key)
+        s3_client.upload_file(zip_path, bucket_name, s3_key)
+
+    # Create/update CodeBuild project
+    project_name = f"bedrock-agentcore-{image_name}"[:255]
+    full_image_uri = f"{ecr_repository_uri}:{tag}"
+
+    ecr_registry = f"{account_id}.dkr.ecr.{region}.amazonaws.com"
+    ecr_login_cmd = (
+        f"aws ecr get-login-password --region {region} | "
+        f"docker login --username AWS --password-stdin {ecr_registry}"
+    )
+
+    buildspec = {
+        "version": "0.2",
+        "phases": {
+            "pre_build": {
+                "commands": [
+                    "echo Logging in to Amazon ECR...",
+                    ecr_login_cmd,
+                ]
+            },
+            "build": {
+                "commands": [
+                    "echo Build started on `date`",
+                    f"docker build -t {full_image_uri} .",
+                ]
+            },
+            "post_build": {
+                "commands": [
+                    "echo Build completed on `date`",
+                    "echo Pushing the Docker image...",
+                    f"docker push {full_image_uri}",
+                ]
+            },
+        },
+    }
+
+    project_config = {
+        "name": project_name,
+        "description": f"Build project for Bedrock AgentCore agent: {image_name}",
+        "source": {
+            "type": "S3",
+            "location": f"{bucket_name}/{s3_key}",
+            # The inline buildspec is supplied through the project source definition.
+            "buildspec": json.dumps(buildspec),
+        },
+        "artifacts": {"type": "NO_ARTIFACTS"},
+        "environment": {
+            "type": "ARM_CONTAINER",
+            "computeType": "BUILD_GENERAL1_SMALL",
+            "image": "aws/codebuild/amazonlinux2-aarch64-standard:3.0",
+            "privilegedMode": True,
+            "environmentVariables": [
+                {"name": "AWS_DEFAULT_REGION", "value": region},
+                {"name": "AWS_ACCOUNT_ID", "value": account_id},
+                {"name": "IMAGE_REPO_NAME", "value": ecr_repo_name},
+                {"name": "IMAGE_TAG", "value": tag},
+            ],
+        },
+        "serviceRole": codebuild_role_arn,
+        "tags": [
+            {"key": "CreatedBy", "value": "bedrock-agentcore-sdk"},
+            {"key": "AgentName", "value": image_name},
+        ],
+    }
+
+    # Create or
update project + try: + codebuild_client.create_project(**project_config) + logger.info("Created CodeBuild project: %s", project_name) + except ClientError as e: + if e.response["Error"]["Code"] == "ResourceAlreadyExistsException": + codebuild_client.update_project(**project_config) + logger.info("Updated CodeBuild project: %s", project_name) + else: + raise + + # Start build + logger.info("Starting CodeBuild build for '%s'...", image_name) + response = codebuild_client.start_build(projectName=project_name) + build_id = response["build"]["id"] + + if not wait: + return { + "buildId": build_id, + "projectName": project_name, + "imageUri": full_image_uri, + "status": "IN_PROGRESS", + } + + # Wait for build to complete + return _wait_for_codebuild( + codebuild_client=codebuild_client, + build_id=build_id, + image_uri=full_image_uri, + max_wait=max_wait, + poll_interval=poll_interval, + ) + + +def _wait_for_codebuild( + codebuild_client: Any, + build_id: str, + image_uri: str, + max_wait: int, + poll_interval: int, +) -> Dict[str, Any]: + """Wait for a CodeBuild build to complete. + + Args: + codebuild_client: boto3 CodeBuild client + build_id: CodeBuild build ID + image_uri: Expected image URI + max_wait: Maximum seconds to wait + poll_interval: Seconds between status checks + + Returns: + Dictionary with build result + + Raises: + RuntimeError: If build fails + TimeoutError: If wait times out + """ + start_time = time.time() + logger.info("Waiting for CodeBuild build to complete...") + + while time.time() - start_time < max_wait: + response = codebuild_client.batch_get_builds(ids=[build_id]) + build = response["builds"][0] + status = build["buildStatus"] + + logger.debug("CodeBuild status: %s", status) + + if status == "SUCCEEDED": + logger.info("CodeBuild build succeeded") + return { + "buildId": build_id, + "imageUri": image_uri, + "status": "SUCCEEDED", + "buildOutput": build.get("logs", {}), + } + + if status in ["FAILED", "FAULT", "STOPPED", "TIMED_OUT"]: + raise RuntimeError(f"CodeBuild build failed with status: {status}") + + time.sleep(poll_interval) + + raise TimeoutError(f"Timeout waiting for CodeBuild build after {max_wait}s") + + +def build_and_push( + source_path: str, + agent_name: str, + entrypoint: str, + region_name: Optional[str] = None, + tag: str = "latest", + wait: bool = True, + max_wait: int = 600, +) -> Dict[str, Any]: + """Build a Docker image using CodeBuild and push to ECR. + + This is the main entry point for container builds. It handles: + 1. Creating ECR repository (auto-generated name) + 2. Generating Dockerfile if not present + 3. Building ARM64 image via CodeBuild + 4. 
Pushing to ECR + + Args: + source_path: Path to agent source code + agent_name: Name of the agent + entrypoint: Entry point (e.g., "agent.py:app") + region_name: AWS region name + tag: Image tag + wait: Wait for build to complete + max_wait: Maximum seconds to wait + + Returns: + Dictionary with build result including imageUri + + Raises: + FileNotFoundError: If source path doesn't exist + RuntimeError: If build fails + """ + source_path = os.path.abspath(source_path) + + if not os.path.exists(source_path): + raise FileNotFoundError(f"Source path not found: {source_path}") + + # Auto-generate ECR repository name + ecr_repository = f"bedrock-agentcore/{agent_name}" + + # Ensure ECR repository exists + ecr_result = ensure_ecr_repository(ecr_repository, region_name) + ecr_repository_uri = ecr_result["repositoryUri"] + + # Generate Dockerfile if not present + dockerfile_path = os.path.join(source_path, "Dockerfile") + if not os.path.exists(dockerfile_path): + logger.info("No Dockerfile found, generating one...") + generate_dockerfile(source_path, entrypoint) + + # Build image using CodeBuild + logger.info("Building image using CodeBuild...") + result = build_image_codebuild( + source_path=source_path, + image_name=agent_name, + ecr_repository_uri=ecr_repository_uri, + tag=tag, + region_name=region_name, + wait=wait, + max_wait=max_wait, + ) + + logger.info("Build complete. Image URI: %s", result.get("imageUri")) + return result diff --git a/src/bedrock_agentcore/runtime/config.py b/src/bedrock_agentcore/runtime/config.py new file mode 100644 index 00000000..6bfee53b --- /dev/null +++ b/src/bedrock_agentcore/runtime/config.py @@ -0,0 +1,110 @@ +"""Configuration models for Bedrock AgentCore Runtime. + +This module provides Pydantic models for Agent runtime configuration +with YAML serialization support. +""" + +from enum import Enum +from typing import Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field + + +class RuntimeStatus(str, Enum): + """Runtime status values.""" + + CREATING = "CREATING" + ACTIVE = "ACTIVE" + UPDATING = "UPDATING" + DELETING = "DELETING" + FAILED = "FAILED" + NOT_FOUND = "NOT_FOUND" + + +class NetworkMode(str, Enum): + """Network mode options.""" + + PUBLIC = "PUBLIC" + VPC = "VPC" + + +class VpcConfigModel(BaseModel): + """VPC configuration for runtime networking.""" + + model_config = ConfigDict(populate_by_name=True) + + security_groups: List[str] = Field(alias="securityGroups") + subnets: List[str] + + +class NetworkConfigurationModel(BaseModel): + """Network configuration for runtime deployment.""" + + model_config = ConfigDict(populate_by_name=True) + + network_mode: NetworkMode = Field(default=NetworkMode.PUBLIC, alias="networkMode") + vpc_config: Optional[VpcConfigModel] = Field(default=None, alias="vpcConfig") + + +class RuntimeArtifactModel(BaseModel): + """Container artifact configuration.""" + + model_config = ConfigDict(populate_by_name=True) + + image_uri: str = Field(alias="imageUri") + + +class BuildStrategyType(str, Enum): + """Build strategy type options.""" + + ECR = "ecr" + DIRECT_CODE_DEPLOY = "direct_code_deploy" + + +class BuildConfigModel(BaseModel): + """Build configuration for agent deployment. + + Attributes: + strategy: Build strategy type (ecr, direct_code_deploy) + image_uri: Pre-built image URI (for ECR with pre-built mode) + source_path: Path to agent source code directory (for CodeBuild or DirectCodeDeploy) + entrypoint: Entry point e.g. 
"agent.py:app" (for CodeBuild or DirectCodeDeploy) + s3_bucket: S3 bucket for direct code deploy + """ + + model_config = ConfigDict(populate_by_name=True) + + strategy: BuildStrategyType = Field(default=BuildStrategyType.ECR) + image_uri: Optional[str] = Field(default=None, alias="imageUri") + source_path: Optional[str] = Field(default=None, alias="sourcePath") + entrypoint: Optional[str] = None + s3_bucket: Optional[str] = Field(default=None, alias="s3Bucket") + + +class RuntimeConfigModel(BaseModel): + """Complete runtime configuration model. + + This model represents the configuration for a Bedrock AgentCore runtime, + suitable for YAML serialization and deserialization. + + Attributes: + name: Unique runtime name + description: Optional description + artifact: Container image configuration + build: Build configuration for source-based deployment + network_configuration: Network settings (PUBLIC or VPC) + environment_variables: Environment variables for the container + tags: Resource tags + """ + + model_config = ConfigDict(populate_by_name=True) + + name: str + description: Optional[str] = None + artifact: Optional[RuntimeArtifactModel] = None + build: Optional[BuildConfigModel] = None + network_configuration: Optional[NetworkConfigurationModel] = Field( + default=None, alias="networkConfiguration" + ) + environment_variables: Optional[Dict[str, str]] = Field(default=None, alias="environmentVariables") + tags: Optional[Dict[str, str]] = None diff --git a/src/bedrock_agentcore/runtime/ecr.py b/src/bedrock_agentcore/runtime/ecr.py new file mode 100644 index 00000000..b9075b86 --- /dev/null +++ b/src/bedrock_agentcore/runtime/ecr.py @@ -0,0 +1,264 @@ +"""ECR operations for Bedrock AgentCore Runtime. + +This module provides functions for managing ECR repositories +for container-based agent deployments. +""" + +import base64 +import logging +import subprocess +from typing import Any, Dict, Optional, Tuple + +import boto3 +from botocore.config import Config +from botocore.exceptions import ClientError + +from bedrock_agentcore._utils.user_agent import build_user_agent_suffix + +logger = logging.getLogger(__name__) + + +def get_ecr_client(region_name: Optional[str] = None) -> Any: + """Get an ECR client with proper user agent. + + Args: + region_name: AWS region name + + Returns: + boto3 ECR client + """ + user_agent_extra = build_user_agent_suffix() + client_config = Config(user_agent_extra=user_agent_extra) + return boto3.client("ecr", region_name=region_name, config=client_config) + + +def ensure_ecr_repository( + repository_name: str, + region_name: Optional[str] = None, +) -> Dict[str, Any]: + """Ensure an ECR repository exists, creating it if needed. + + This is an idempotent operation - if the repository already exists, + it returns the existing repository details. 
+ + Args: + repository_name: Name of the ECR repository + region_name: AWS region name + + Returns: + Dictionary with repository details including repositoryUri + + Raises: + ClientError: If repository creation fails (other than already exists) + """ + ecr_client = get_ecr_client(region_name) + + # Try to describe existing repository + try: + response = ecr_client.describe_repositories(repositoryNames=[repository_name]) + repository = response["repositories"][0] + logger.info("ECR repository '%s' already exists", repository_name) + return { + "repositoryName": repository["repositoryName"], + "repositoryUri": repository["repositoryUri"], + "repositoryArn": repository["repositoryArn"], + "created": False, + } + except ClientError as e: + if e.response["Error"]["Code"] != "RepositoryNotFoundException": + raise + + # Create repository + logger.info("Creating ECR repository '%s'...", repository_name) + try: + response = ecr_client.create_repository( + repositoryName=repository_name, + imageScanningConfiguration={"scanOnPush": True}, + imageTagMutability="MUTABLE", + ) + repository = response["repository"] + logger.info("Created ECR repository: %s", repository["repositoryUri"]) + return { + "repositoryName": repository["repositoryName"], + "repositoryUri": repository["repositoryUri"], + "repositoryArn": repository["repositoryArn"], + "created": True, + } + except ClientError as e: + if e.response["Error"]["Code"] == "RepositoryAlreadyExistsException": + # Race condition - repository was created between describe and create + response = ecr_client.describe_repositories(repositoryNames=[repository_name]) + repository = response["repositories"][0] + return { + "repositoryName": repository["repositoryName"], + "repositoryUri": repository["repositoryUri"], + "repositoryArn": repository["repositoryArn"], + "created": False, + } + raise + + +def delete_ecr_repository( + repository_name: str, + region_name: Optional[str] = None, + force: bool = False, +) -> Dict[str, Any]: + """Delete an ECR repository. + + Args: + repository_name: Name of the ECR repository + region_name: AWS region name + force: If True, delete even if repository contains images + + Returns: + Dictionary with deletion status + + Raises: + ClientError: If deletion fails + """ + ecr_client = get_ecr_client(region_name) + + try: + ecr_client.delete_repository(repositoryName=repository_name, force=force) + logger.info("Deleted ECR repository '%s'", repository_name) + return {"status": "DELETED", "repositoryName": repository_name} + except ClientError as e: + if e.response["Error"]["Code"] == "RepositoryNotFoundException": + logger.warning("ECR repository '%s' not found", repository_name) + return {"status": "NOT_FOUND", "repositoryName": repository_name} + raise + + +def get_ecr_login_credentials(region_name: Optional[str] = None) -> Tuple[str, str, str]: + """Get ECR login credentials for Docker authentication. 
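+
+    Example (illustrative sketch; assumes credentials that allow
+    ecr:GetAuthorizationToken):
+        username, password, registry_url = get_ecr_login_credentials("us-west-2")
+        # The tuple can be fed to `docker login` or any OCI registry client.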
+ + Args: + region_name: AWS region name + + Returns: + Tuple of (username, password, registry_url) + + Raises: + ClientError: If unable to get authorization token + """ + ecr_client = get_ecr_client(region_name) + + response = ecr_client.get_authorization_token() + auth_data = response["authorizationData"][0] + + # Decode the token (base64 encoded "username:password") + token = base64.b64decode(auth_data["authorizationToken"]).decode("utf-8") + username, password = token.split(":") + registry_url = auth_data["proxyEndpoint"] + + return username, password, registry_url + + +def docker_login_to_ecr(region_name: Optional[str] = None) -> bool: + """Perform Docker login to ECR. + + Args: + region_name: AWS region name + + Returns: + True if login succeeded + + Raises: + RuntimeError: If Docker login fails + """ + username, password, registry_url = get_ecr_login_credentials(region_name) + + logger.info("Logging into ECR registry: %s", registry_url) + + # Use docker login command + result = subprocess.run( + ["docker", "login", "--username", username, "--password-stdin", registry_url], + input=password, + capture_output=True, + text=True, + ) + + if result.returncode != 0: + raise RuntimeError(f"Docker login to ECR failed: {result.stderr}") + + logger.info("Successfully logged into ECR") + return True + + +def push_image_to_ecr( + local_image: str, + repository_uri: str, + tag: str = "latest", + region_name: Optional[str] = None, +) -> Dict[str, Any]: + """Push a local Docker image to ECR. + + Args: + local_image: Local image name (e.g., "my-agent:latest") + repository_uri: ECR repository URI + tag: Image tag + region_name: AWS region name + + Returns: + Dictionary with push details including full image URI + + Raises: + RuntimeError: If Docker operations fail + """ + # Login to ECR + docker_login_to_ecr(region_name) + + full_uri = f"{repository_uri}:{tag}" + + # Tag the image + logger.info("Tagging image '%s' as '%s'", local_image, full_uri) + result = subprocess.run( + ["docker", "tag", local_image, full_uri], + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise RuntimeError(f"Failed to tag image: {result.stderr}") + + # Push the image + logger.info("Pushing image to ECR: %s", full_uri) + result = subprocess.run( + ["docker", "push", full_uri], + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise RuntimeError(f"Failed to push image: {result.stderr}") + + logger.info("Successfully pushed image to ECR: %s", full_uri) + return { + "imageUri": full_uri, + "repositoryUri": repository_uri, + "tag": tag, + } + + +def get_account_id() -> str: + """Get the current AWS account ID. + + Returns: + AWS account ID string + """ + sts_client = boto3.client("sts") + return str(sts_client.get_caller_identity()["Account"]) + + +def build_ecr_uri(repository_name: str, region_name: Optional[str] = None, tag: str = "latest") -> str: + """Build the full ECR URI for a repository. 
+ + Args: + repository_name: Name of the ECR repository + region_name: AWS region name + tag: Image tag + + Returns: + Full ECR URI (e.g., "123456789012.dkr.ecr.us-west-2.amazonaws.com/my-repo:latest") + """ + account_id = get_account_id() + region = region_name or boto3.Session().region_name or "us-west-2" + return f"{account_id}.dkr.ecr.{region}.amazonaws.com/{repository_name}:{tag}" diff --git a/src/bedrock_agentcore/runtime/iam.py b/src/bedrock_agentcore/runtime/iam.py new file mode 100644 index 00000000..897f3576 --- /dev/null +++ b/src/bedrock_agentcore/runtime/iam.py @@ -0,0 +1,442 @@ +"""IAM operations for Bedrock AgentCore Runtime. + +This module provides functions for managing IAM roles +for container-based agent deployments. +""" + +import hashlib +import json +import logging +from typing import Any, Dict, List, Optional + +import boto3 +from botocore.config import Config +from botocore.exceptions import ClientError + +from bedrock_agentcore._utils.user_agent import build_user_agent_suffix + +logger = logging.getLogger(__name__) + + +def get_iam_client(region_name: Optional[str] = None) -> Any: + """Get an IAM client with proper user agent. + + Args: + region_name: AWS region name + + Returns: + boto3 IAM client + """ + user_agent_extra = build_user_agent_suffix() + client_config = Config(user_agent_extra=user_agent_extra) + return boto3.client("iam", region_name=region_name, config=client_config) + + +def _generate_deterministic_suffix(agent_name: str) -> str: + """Generate a deterministic suffix from agent name using SHA-256. + + Args: + agent_name: Name of the agent + + Returns: + 10-character deterministic suffix + """ + hash_obj = hashlib.sha256(agent_name.encode("utf-8")) + return hash_obj.hexdigest()[:10] + + +def _get_runtime_trust_policy(region: str, account_id: str) -> Dict[str, Any]: + """Get the trust policy for the runtime execution role. + + Args: + region: AWS region + account_id: AWS account ID + + Returns: + Trust policy document + """ + return { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": {"Service": "bedrock-agentcore.amazonaws.com"}, + "Action": "sts:AssumeRole", + "Condition": {"StringEquals": {"aws:SourceAccount": account_id}}, + } + ], + } + + +def _get_runtime_execution_policy( + region: str, + account_id: str, + ecr_repository_arn: Optional[str] = None, +) -> Dict[str, Any]: + """Get the execution policy for the runtime role. + + Args: + region: AWS region + account_id: AWS account ID + ecr_repository_arn: Optional ECR repository ARN + + Returns: + Execution policy document + """ + statements: List[Dict[str, Any]] = [ + { + "Sid": "CloudWatchLogs", + "Effect": "Allow", + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents", + ], + "Resource": f"arn:aws:logs:{region}:{account_id}:log-group:/aws/bedrock-agentcore/*", + }, + { + "Sid": "ECRAuth", + "Effect": "Allow", + "Action": ["ecr:GetAuthorizationToken"], + "Resource": "*", + }, + ] + + # Add ECR pull permissions if repository specified + if ecr_repository_arn: + statements.append( + { + "Sid": "ECRPull", + "Effect": "Allow", + "Action": [ + "ecr:BatchCheckLayerAvailability", + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + ], + "Resource": ecr_repository_arn, + } + ) + + return { + "Version": "2012-10-17", + "Statement": statements, + } + + +def _get_codebuild_trust_policy(account_id: str) -> Dict[str, Any]: + """Get the trust policy for the CodeBuild execution role. 
+ + Args: + account_id: AWS account ID + + Returns: + Trust policy document + """ + return { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Principal": {"Service": "codebuild.amazonaws.com"}, + "Action": "sts:AssumeRole", + "Condition": {"StringEquals": {"aws:SourceAccount": account_id}}, + } + ], + } + + +def _get_codebuild_execution_policy( + region: str, + account_id: str, + ecr_repository_arn: str, + source_bucket_name: Optional[str] = None, +) -> Dict[str, Any]: + """Get the execution policy for the CodeBuild role. + + Args: + region: AWS region + account_id: AWS account ID + ecr_repository_arn: ECR repository ARN + source_bucket_name: Optional S3 bucket for source code + + Returns: + Execution policy document + """ + statements: List[Dict[str, Any]] = [ + { + "Sid": "CloudWatchLogs", + "Effect": "Allow", + "Action": [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents", + ], + "Resource": f"arn:aws:logs:{region}:{account_id}:log-group:/aws/codebuild/*", + }, + { + "Sid": "ECRAuth", + "Effect": "Allow", + "Action": ["ecr:GetAuthorizationToken"], + "Resource": "*", + }, + { + "Sid": "ECRPush", + "Effect": "Allow", + "Action": [ + "ecr:BatchCheckLayerAvailability", + "ecr:GetDownloadUrlForLayer", + "ecr:BatchGetImage", + "ecr:PutImage", + "ecr:InitiateLayerUpload", + "ecr:UploadLayerPart", + "ecr:CompleteLayerUpload", + ], + "Resource": ecr_repository_arn, + }, + ] + + # Add S3 permissions if bucket specified + if source_bucket_name: + statements.append( + { + "Sid": "S3Access", + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:GetObjectVersion", + "s3:PutObject", + "s3:ListBucket", + ], + "Resource": [ + f"arn:aws:s3:::{source_bucket_name}", + f"arn:aws:s3:::{source_bucket_name}/*", + ], + } + ) + + return { + "Version": "2012-10-17", + "Statement": statements, + } + + +def get_or_create_runtime_execution_role( + agent_name: str, + region_name: Optional[str] = None, + role_name: Optional[str] = None, + ecr_repository_arn: Optional[str] = None, +) -> Dict[str, Any]: + """Get or create the IAM execution role for a runtime. + + This is an idempotent operation - if the role already exists, + it returns the existing role details. 
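+
+    Example (illustrative sketch; the ARN below is a placeholder and AWS
+    credentials with IAM permissions are assumed):
+        role = get_or_create_runtime_execution_role(
+            agent_name="my-agent",
+            ecr_repository_arn="arn:aws:ecr:us-west-2:123456789012:repository/bedrock-agentcore/my-agent",
+        )
+        print(role["roleArn"], "created" if role["created"] else "reused")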
+ + Args: + agent_name: Name of the agent (used for deterministic naming) + region_name: AWS region name + role_name: Optional explicit role name (otherwise auto-generated) + ecr_repository_arn: Optional ECR repository ARN for pull permissions + + Returns: + Dictionary with role details including roleArn + + Raises: + ClientError: If role creation fails + """ + iam_client = get_iam_client(region_name) + sts_client = boto3.client("sts") + + account_id = sts_client.get_caller_identity()["Account"] + region = region_name or boto3.Session().region_name or "us-west-2" + + # Generate deterministic role name if not provided + if not role_name: + suffix = _generate_deterministic_suffix(agent_name) + role_name = f"AmazonBedrockAgentCoreSDKRuntime-{region}-{suffix}" + + # Try to get existing role + try: + response = iam_client.get_role(RoleName=role_name) + role = response["Role"] + logger.info("IAM role '%s' already exists", role_name) + return { + "roleName": role["RoleName"], + "roleArn": role["Arn"], + "created": False, + } + except ClientError as e: + if e.response["Error"]["Code"] != "NoSuchEntity": + raise + + # Create role + logger.info("Creating IAM execution role '%s'...", role_name) + + trust_policy = _get_runtime_trust_policy(region, account_id) + execution_policy = _get_runtime_execution_policy(region, account_id, ecr_repository_arn) + + try: + response = iam_client.create_role( + RoleName=role_name, + AssumeRolePolicyDocument=json.dumps(trust_policy), + Description=f"Execution role for Bedrock AgentCore runtime: {agent_name}", + Tags=[ + {"Key": "CreatedBy", "Value": "bedrock-agentcore-sdk"}, + {"Key": "AgentName", "Value": agent_name}, + ], + ) + role_arn = response["Role"]["Arn"] + + # Attach inline policy + iam_client.put_role_policy( + RoleName=role_name, + PolicyName="ExecutionPolicy", + PolicyDocument=json.dumps(execution_policy), + ) + + logger.info("Created IAM execution role: %s", role_arn) + return { + "roleName": role_name, + "roleArn": role_arn, + "created": True, + } + + except ClientError as e: + if e.response["Error"]["Code"] == "EntityAlreadyExists": + # Race condition - role was created between get and create + response = iam_client.get_role(RoleName=role_name) + return { + "roleName": response["Role"]["RoleName"], + "roleArn": response["Role"]["Arn"], + "created": False, + } + raise + + +def get_or_create_codebuild_execution_role( + agent_name: str, + ecr_repository_arn: str, + region_name: Optional[str] = None, + role_name: Optional[str] = None, + source_bucket_name: Optional[str] = None, +) -> Dict[str, Any]: + """Get or create the IAM execution role for CodeBuild. + + This is an idempotent operation - if the role already exists, + it returns the existing role details. 
+ + Args: + agent_name: Name of the agent (used for deterministic naming) + ecr_repository_arn: ECR repository ARN for push permissions + region_name: AWS region name + role_name: Optional explicit role name (otherwise auto-generated) + source_bucket_name: Optional S3 bucket for source code + + Returns: + Dictionary with role details including roleArn + + Raises: + ClientError: If role creation fails + """ + iam_client = get_iam_client(region_name) + sts_client = boto3.client("sts") + + account_id = sts_client.get_caller_identity()["Account"] + region = region_name or boto3.Session().region_name or "us-west-2" + + # Generate deterministic role name if not provided + if not role_name: + suffix = _generate_deterministic_suffix(agent_name) + role_name = f"AmazonBedrockAgentCoreSDKCodeBuild-{region}-{suffix}" + + # Try to get existing role + try: + response = iam_client.get_role(RoleName=role_name) + role = response["Role"] + logger.info("CodeBuild IAM role '%s' already exists", role_name) + return { + "roleName": role["RoleName"], + "roleArn": role["Arn"], + "created": False, + } + except ClientError as e: + if e.response["Error"]["Code"] != "NoSuchEntity": + raise + + # Create role + logger.info("Creating CodeBuild IAM role '%s'...", role_name) + + trust_policy = _get_codebuild_trust_policy(account_id) + execution_policy = _get_codebuild_execution_policy( + region, account_id, ecr_repository_arn, source_bucket_name + ) + + try: + response = iam_client.create_role( + RoleName=role_name, + AssumeRolePolicyDocument=json.dumps(trust_policy), + Description=f"CodeBuild role for Bedrock AgentCore agent: {agent_name}", + Tags=[ + {"Key": "CreatedBy", "Value": "bedrock-agentcore-sdk"}, + {"Key": "AgentName", "Value": agent_name}, + ], + ) + role_arn = response["Role"]["Arn"] + + # Attach inline policy + iam_client.put_role_policy( + RoleName=role_name, + PolicyName="CodeBuildExecutionPolicy", + PolicyDocument=json.dumps(execution_policy), + ) + + logger.info("Created CodeBuild IAM role: %s", role_arn) + return { + "roleName": role_name, + "roleArn": role_arn, + "created": True, + } + + except ClientError as e: + if e.response["Error"]["Code"] == "EntityAlreadyExists": + # Race condition + response = iam_client.get_role(RoleName=role_name) + return { + "roleName": response["Role"]["RoleName"], + "roleArn": response["Role"]["Arn"], + "created": False, + } + raise + + +def delete_role(role_name: str, region_name: Optional[str] = None) -> Dict[str, Any]: + """Delete an IAM role and its inline policies. 
+ + Args: + role_name: Name of the IAM role + region_name: AWS region name + + Returns: + Dictionary with deletion status + + Raises: + ClientError: If deletion fails + """ + iam_client = get_iam_client(region_name) + + try: + # First, delete inline policies + policies = iam_client.list_role_policies(RoleName=role_name) + for policy_name in policies.get("PolicyNames", []): + iam_client.delete_role_policy(RoleName=role_name, PolicyName=policy_name) + logger.debug("Deleted inline policy: %s", policy_name) + + # Then delete the role + iam_client.delete_role(RoleName=role_name) + logger.info("Deleted IAM role '%s'", role_name) + return {"status": "DELETED", "roleName": role_name} + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchEntity": + logger.warning("IAM role '%s' not found", role_name) + return {"status": "NOT_FOUND", "roleName": role_name} + raise diff --git a/tests/bedrock_agentcore/memory/test_config.py b/tests/bedrock_agentcore/memory/test_config.py new file mode 100644 index 00000000..74660494 --- /dev/null +++ b/tests/bedrock_agentcore/memory/test_config.py @@ -0,0 +1,113 @@ +"""Tests for memory configuration models.""" + +import pytest + +from bedrock_agentcore.memory.config import ( + MemoryConfigModel, + StrategyConfigModel, + StrategyType, +) + + +class TestStrategyType: + """Tests for StrategyType enum.""" + + def test_strategy_type_values(self) -> None: + """Test that all expected strategy type values exist.""" + assert StrategyType.SEMANTIC == "SEMANTIC" + assert StrategyType.SUMMARY == "SUMMARY" + assert StrategyType.USER_PREFERENCE == "USER_PREFERENCE" + assert StrategyType.CUSTOM_SEMANTIC == "CUSTOM_SEMANTIC" + + +class TestStrategyConfigModel: + """Tests for StrategyConfigModel.""" + + def test_create_with_alias(self) -> None: + """Test creating StrategyConfigModel with alias names.""" + config = StrategyConfigModel( + type=StrategyType.SEMANTIC, + namespace="facts/{sessionId}/", + ) + assert config.strategy_type == StrategyType.SEMANTIC + assert config.namespace == "facts/{sessionId}/" + assert config.custom_prompt is None + + def test_create_with_custom_prompt(self) -> None: + """Test creating StrategyConfigModel with custom prompt.""" + config = StrategyConfigModel( + type=StrategyType.CUSTOM_SEMANTIC, + namespace="custom/{sessionId}/", + customPrompt="Extract important facts from the conversation.", + ) + assert config.strategy_type == StrategyType.CUSTOM_SEMANTIC + assert config.namespace == "custom/{sessionId}/" + assert config.custom_prompt == "Extract important facts from the conversation." 
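+
+    # Illustrative additional check (not part of the original change set): round-trips
+    # a config through its alias form, relying on the same alias handling that
+    # test_validate_from_dict below exercises.
+    def test_alias_round_trip(self) -> None:
+        """Round-trip a strategy config through its alias representation."""
+        original = StrategyConfigModel(
+            type=StrategyType.USER_PREFERENCE,
+            namespace="preferences/{actorId}/",
+        )
+        dumped = original.model_dump(by_alias=True, exclude_none=True)
+        restored = StrategyConfigModel.model_validate(dumped)
+        assert restored.strategy_type == StrategyType.USER_PREFERENCE
+        assert restored.namespace == "preferences/{actorId}/"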
+ + def test_dump_by_alias(self) -> None: + """Test dumping config with alias names.""" + config = StrategyConfigModel( + type=StrategyType.SUMMARY, + namespace="summaries/", + ) + data = config.model_dump(by_alias=True) + assert data["type"] == "SUMMARY" + assert data["namespace"] == "summaries/" + + +class TestMemoryConfigModel: + """Tests for MemoryConfigModel.""" + + def test_minimal_config(self) -> None: + """Test minimal config with just name.""" + config = MemoryConfigModel(name="test-memory") + assert config.name == "test-memory" + assert config.description is None + assert config.strategies is None + assert config.encryption_key_arn is None + assert config.tags is None + + def test_full_config(self) -> None: + """Test full config with all fields.""" + strategies = [ + StrategyConfigModel(type=StrategyType.SEMANTIC, namespace="facts/"), + StrategyConfigModel(type=StrategyType.SUMMARY, namespace="summaries/"), + ] + config = MemoryConfigModel( + name="test-memory", + description="Test memory description", + strategies=strategies, + encryptionKeyArn="arn:aws:kms:us-west-2:123456789012:key/abc123", + tags={"Environment": "test"}, + ) + assert config.name == "test-memory" + assert config.description == "Test memory description" + assert config.strategies is not None + assert len(config.strategies) == 2 + assert config.encryption_key_arn == "arn:aws:kms:us-west-2:123456789012:key/abc123" + assert config.tags == {"Environment": "test"} + + def test_dump_excludes_none(self) -> None: + """Test that dump excludes None values.""" + config = MemoryConfigModel(name="test-memory") + data = config.model_dump(by_alias=True, exclude_none=True) + assert "name" in data + assert "description" not in data + assert "strategies" not in data + assert "encryptionKeyArn" not in data + + def test_validate_from_dict(self) -> None: + """Test validating from dict (YAML-like structure).""" + data = { + "name": "test-memory", + "strategies": [ + {"type": "SEMANTIC", "namespace": "facts/"}, + {"type": "SUMMARY", "namespace": "summaries/"}, + ], + } + config = MemoryConfigModel.model_validate(data) + assert config.name == "test-memory" + assert config.strategies is not None + assert len(config.strategies) == 2 + assert config.strategies[0].strategy_type == StrategyType.SEMANTIC + assert config.strategies[1].strategy_type == StrategyType.SUMMARY diff --git a/tests/bedrock_agentcore/memory/test_memory.py b/tests/bedrock_agentcore/memory/test_memory.py new file mode 100644 index 00000000..fc6a9be9 --- /dev/null +++ b/tests/bedrock_agentcore/memory/test_memory.py @@ -0,0 +1,172 @@ +"""Tests for Memory class.""" + +from unittest.mock import MagicMock, patch + +import pytest + +from bedrock_agentcore.memory.config import StrategyType +from bedrock_agentcore.memory.memory import Memory + + +class TestMemoryInit: + """Tests for Memory initialization.""" + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_minimal_init(self, mock_client_class: MagicMock) -> None: + """Test minimal memory initialization.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + assert memory.name == "test-memory" + assert memory.config.name == "test-memory" + assert memory.config.description is None + assert memory.config.strategies is None + assert memory.memory_id is None + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_full_init(self, mock_client_class: MagicMock) -> None: + """Test full memory 
initialization with all parameters.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory( + name="test-memory", + description="Test memory description", + strategies=[ + {"type": "SEMANTIC", "namespace": "facts/{sessionId}/"}, + {"type": "SUMMARY", "namespace": "summaries/{sessionId}/"}, + ], + encryption_key_arn="arn:aws:kms:us-west-2:123456789012:key/abc123", + tags={"Environment": "test"}, + region="us-east-1", + ) + + assert memory.name == "test-memory" + assert memory.config.description == "Test memory description" + assert memory.config.strategies is not None + assert len(memory.config.strategies) == 2 + assert memory.config.strategies[0].strategy_type == StrategyType.SEMANTIC + assert memory.config.strategies[0].namespace == "facts/{sessionId}/" + assert memory.config.strategies[1].strategy_type == StrategyType.SUMMARY + assert memory.config.encryption_key_arn == "arn:aws:kms:us-west-2:123456789012:key/abc123" + assert memory.config.tags == {"Environment": "test"} + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_init_with_custom_prompt(self, mock_client_class: MagicMock) -> None: + """Test memory initialization with custom prompt strategy.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory( + name="test-memory", + strategies=[ + { + "type": "CUSTOM_SEMANTIC", + "namespace": "custom/", + "customPrompt": "Extract key facts from conversation.", + }, + ], + ) + + assert memory.config.strategies is not None + assert len(memory.config.strategies) == 1 + assert memory.config.strategies[0].strategy_type == StrategyType.CUSTOM_SEMANTIC + assert memory.config.strategies[0].custom_prompt == "Extract key facts from conversation." 
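+
+    # Illustrative additional check (not part of the original change set): verifies
+    # that strategies passed as plain dicts survive a by-alias dump of the stored
+    # config model, which the assertions above already treat as a MemoryConfigModel.
+    @patch("bedrock_agentcore.memory.memory.MemoryClient")
+    def test_config_dump_uses_aliases(self, mock_client_class: MagicMock) -> None:
+        """Config built from dict strategies dumps back in alias form."""
+        mock_client = MagicMock()
+        mock_client.region_name = "us-west-2"
+        mock_client_class.return_value = mock_client
+
+        memory = Memory(
+            name="test-memory",
+            strategies=[{"type": "SEMANTIC", "namespace": "facts/{sessionId}/"}],
+        )
+        data = memory.config.model_dump(by_alias=True, exclude_none=True)
+        assert data["name"] == "test-memory"
+        assert data["strategies"][0]["namespace"] == "facts/{sessionId}/"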
+ + +class TestMemoryIsActive: + """Tests for Memory is_active property.""" + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_is_active_without_memory_id(self, mock_client_class: MagicMock) -> None: + """Test is_active returns False when memory_id is not set.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + assert memory.is_active is False + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_is_active_with_active_memory(self, mock_client_class: MagicMock) -> None: + """Test is_active returns True when memory is active.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client.get_memory_status.return_value = "ACTIVE" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + memory._memory_id = "memory-123" + + assert memory.is_active is True + + +class TestMemoryOperations: + """Tests for Memory create/delete operations.""" + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_delete_without_memory_id(self, mock_client_class: MagicMock) -> None: + """Test delete when memory is not created.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + result = memory.delete() + + assert result["status"] == "NOT_CREATED" + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_get_session_without_memory_id_raises(self, mock_client_class: MagicMock) -> None: + """Test that get_session raises ValueError when memory is not launched.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + with pytest.raises(ValueError, match="Memory is not launched"): + memory.get_session(actor_id="user-123", session_id="session-456") + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_list_events_without_memory_id_raises(self, mock_client_class: MagicMock) -> None: + """Test that list_events raises ValueError when memory is not launched.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + with pytest.raises(ValueError, match="Memory is not launched"): + memory.list_events(actor_id="user-123", session_id="session-456") + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_search_records_without_memory_id_raises(self, mock_client_class: MagicMock) -> None: + """Test that search_records raises ValueError when memory is not launched.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + with pytest.raises(ValueError, match="Memory is not launched"): + memory.search_records(query="test", namespace="facts/") + + @patch("bedrock_agentcore.memory.memory.MemoryClient") + def test_add_strategy_without_memory_id_raises(self, mock_client_class: MagicMock) -> None: + """Test that add_strategy raises ValueError when memory is not launched.""" + mock_client = MagicMock() + mock_client.region_name = "us-west-2" + mock_client_class.return_value = mock_client + + memory = Memory(name="test-memory") + + with pytest.raises(ValueError, match="Memory is not launched"): + memory.add_strategy(strategy_type="SEMANTIC", namespace="facts/") diff --git 
a/tests/bedrock_agentcore/runtime/test_agent.py b/tests/bedrock_agentcore/runtime/test_agent.py new file mode 100644 index 00000000..a0ea80cf --- /dev/null +++ b/tests/bedrock_agentcore/runtime/test_agent.py @@ -0,0 +1,218 @@ +"""Tests for Agent class.""" + +import json +from unittest.mock import MagicMock, patch + +import pytest + +from bedrock_agentcore.runtime.agent import Agent +from bedrock_agentcore.runtime.build import DirectCodeDeploy, ECR +from bedrock_agentcore.runtime.config import NetworkMode + + +class TestAgentInit: + """Tests for Agent initialization.""" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_minimal_init_with_ecr_prebuilt(self, mock_boto3: MagicMock) -> None: + """Test minimal agent initialization with ECR prebuilt image.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + ) + + assert agent.name == "test-agent" + assert agent.config.name == "test-agent" + assert agent.config.artifact is not None + assert agent.config.artifact.image_uri == "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + assert agent.runtime_arn is None + assert agent.runtime_id is None + assert agent.is_deployed is False + assert agent.image_uri == "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_init_with_ecr_codebuild(self, mock_boto3: MagicMock) -> None: + """Test agent initialization with ECR CodeBuild strategy.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(source_path="./test-src", entrypoint="main.py:app") + agent = Agent( + name="test-agent", + build=build, + ) + + assert agent.name == "test-agent" + assert agent.build_strategy is build + assert agent.image_uri is None # Not yet built + assert agent.config.artifact is None # Not yet built + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_full_init(self, mock_boto3: MagicMock) -> None: + """Test full agent initialization with all parameters.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + description="Test agent description", + network_mode="PUBLIC", + environment_variables={"LOG_LEVEL": "INFO"}, + tags={"Environment": "test"}, + region="us-east-1", + ) + + assert agent.name == "test-agent" + assert agent.config.description == "Test agent description" + assert agent.config.network_configuration is not None + assert agent.config.network_configuration.network_mode == NetworkMode.PUBLIC + assert agent.config.environment_variables == {"LOG_LEVEL": "INFO"} + assert agent.config.tags == {"Environment": "test"} + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_vpc_mode_init(self, mock_boto3: MagicMock) -> None: + """Test agent initialization with VPC mode.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + network_mode="VPC", + security_groups=["sg-123", "sg-456"], + subnets=["subnet-abc", "subnet-def"], + ) + + assert agent.config.network_configuration is not None + assert agent.config.network_configuration.network_mode == NetworkMode.VPC + assert agent.config.network_configuration.vpc_config is not None + assert 
agent.config.network_configuration.vpc_config.security_groups == ["sg-123", "sg-456"] + assert agent.config.network_configuration.vpc_config.subnets == ["subnet-abc", "subnet-def"] + + +class TestAgentLaunch: + """Tests for Agent launch operations.""" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_launch_without_built_image_raises(self, mock_boto3: MagicMock) -> None: + """Test that launch raises ValueError when source-based agent not built.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(source_path="./test-source", entrypoint="agent.py:app") + agent = Agent( + name="test-agent", + build=build, + ) + + with pytest.raises(ValueError, match="Cannot launch agent without image_uri"): + agent.launch() + + +class TestAgentInvoke: + """Tests for Agent invoke operations.""" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_invoke_not_deployed_raises(self, mock_boto3: MagicMock) -> None: + """Test that invoke raises ValueError when not deployed.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + ) + + with pytest.raises(ValueError, match="Agent is not deployed"): + agent.invoke({"message": "Hello"}) + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_invoke_with_dict_payload(self, mock_boto3: MagicMock) -> None: + """Test invoke with dictionary payload.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + mock_data_plane = MagicMock() + response_payload = json.dumps({"response": "Hello back!"}).encode("utf-8") + mock_data_plane.invoke_agent_runtime.return_value = { + "payload": response_payload, + "sessionId": "session-123", + "contentType": "application/json", + } + mock_boto3.client.return_value = mock_data_plane + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + ) + # Simulate deployed state + agent._runtime_arn = "arn:aws:bedrock-agentcore:us-west-2:123456789012:agent-runtime/test-id" + + result = agent.invoke({"message": "Hello"}) + + assert result["payload"] == {"response": "Hello back!"} + assert result["sessionId"] == "session-123" + + +class TestAgentDestroy: + """Tests for Agent destroy operations.""" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_destroy_not_deployed(self, mock_boto3: MagicMock) -> None: + """Test destroy when not deployed returns NOT_DEPLOYED status.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + ) + + result = agent.destroy() + + assert result["status"] == "NOT_DEPLOYED" + + +class TestAgentBuildStrategies: + """Tests for Agent build strategy serialization.""" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_build_config_ecr_prebuilt(self, mock_boto3: MagicMock) -> None: + """Test that ECR prebuilt is serialized correctly.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent(name="test-agent", build=build) + + assert agent.config.build is not None + assert agent.config.build.strategy.value == "ecr" + assert agent.config.build.image_uri == "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def 
test_build_config_ecr_codebuild(self, mock_boto3: MagicMock) -> None: + """Test that ECR codebuild is serialized correctly.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = ECR(source_path="./src", entrypoint="main.py:app") + agent = Agent(name="test-agent", build=build) + + assert agent.config.build is not None + assert agent.config.build.strategy.value == "ecr" + assert agent.config.build.source_path == "./src" + assert agent.config.build.entrypoint == "main.py:app" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_build_config_direct_code_deploy(self, mock_boto3: MagicMock) -> None: + """Test that DirectCodeDeploy is serialized correctly.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + build = DirectCodeDeploy(source_path="./src", entrypoint="main.py:app", s3_bucket="my-bucket") + agent = Agent(name="test-agent", build=build) + + assert agent.config.build is not None + assert agent.config.build.strategy.value == "direct_code_deploy" + assert agent.config.build.source_path == "./src" + assert agent.config.build.entrypoint == "main.py:app" + assert agent.config.build.s3_bucket == "my-bucket" diff --git a/tests/bedrock_agentcore/runtime/test_build.py b/tests/bedrock_agentcore/runtime/test_build.py new file mode 100644 index 00000000..2ac3cf36 --- /dev/null +++ b/tests/bedrock_agentcore/runtime/test_build.py @@ -0,0 +1,233 @@ +"""Tests for Build strategies.""" + +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from bedrock_agentcore.runtime.build import ( + Build, + DirectCodeDeploy, + ECR, +) + + +class TestECRPrebuilt: + """Tests for ECR with pre-built image.""" + + def test_mode_is_prebuilt(self) -> None: + """Test that mode is 'prebuilt' when image_uri is provided.""" + strategy = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + assert strategy.mode == "prebuilt" + + def test_image_uri(self) -> None: + """Test that image_uri is returned correctly.""" + image_uri = "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + strategy = ECR(image_uri=image_uri) + assert strategy.image_uri == image_uri + + def test_source_path_is_none(self) -> None: + """Test that source_path is None for pre-built.""" + strategy = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + assert strategy.source_path is None + + def test_entrypoint_is_none(self) -> None: + """Test that entrypoint is None for pre-built.""" + strategy = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + assert strategy.entrypoint is None + + def test_launch_returns_image_uri(self) -> None: + """Test that launch() returns the image URI for pre-built.""" + image_uri = "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + strategy = ECR(image_uri=image_uri) + result = strategy.launch(agent_name="test-agent") + assert result["imageUri"] == image_uri + assert result["status"] == "READY" + + +class TestECRCodeBuild: + """Tests for ECR with CodeBuild (source-based).""" + + def test_mode_is_codebuild(self) -> None: + """Test that mode is 'codebuild' when source_path is provided.""" + strategy = ECR(source_path="./test-src", entrypoint="main.py:app") + assert strategy.mode == "codebuild" + + def test_source_path_and_entrypoint(self) -> None: + """Test source_path and entrypoint are stored.""" + strategy = ECR(source_path="./test-src", entrypoint="main.py:app") + assert strategy.source_path == "./test-src" + assert strategy.entrypoint == 
"main.py:app" + + def test_image_uri_is_none_before_launch(self) -> None: + """Test image_uri is None before launch.""" + strategy = ECR(source_path="./test-src", entrypoint="main.py:app") + assert strategy.image_uri is None + + @patch("bedrock_agentcore.runtime.builder.build_and_push") + def test_launch_calls_builder(self, mock_build_and_push: MagicMock) -> None: + """Test that launch() delegates to builder module.""" + mock_build_and_push.return_value = { + "imageUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest", + "buildId": "build-123", + "status": "SUCCEEDED", + } + + strategy = ECR(source_path="/tmp/test-agent", entrypoint="main.py:app") + result = strategy.launch( + agent_name="test-agent", + region_name="us-west-2", + ) + + mock_build_and_push.assert_called_once() + assert result["status"] == "SUCCEEDED" + assert "imageUri" in result + assert strategy.image_uri == "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + + +class TestECRValidation: + """Tests for ECR validation.""" + + def test_requires_image_uri_or_source_path(self) -> None: + """Test that either image_uri or source_path must be provided.""" + with pytest.raises(ValueError, match="Must provide either"): + ECR() + + def test_source_path_requires_entrypoint(self) -> None: + """Test that source_path requires entrypoint.""" + with pytest.raises(ValueError, match="Must provide either"): + ECR(source_path="./test-src") + + def test_entrypoint_requires_source_path(self) -> None: + """Test that entrypoint alone is not valid.""" + with pytest.raises(ValueError, match="Must provide either"): + ECR(entrypoint="main.py:app") + + +class TestDirectCodeDeploy: + """Tests for DirectCodeDeploy.""" + + def test_custom_bucket(self) -> None: + """Test custom S3 bucket specification.""" + strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app", s3_bucket="my-bucket") + assert strategy._s3_bucket == "my-bucket" + + def test_source_path_and_entrypoint(self) -> None: + """Test source_path and entrypoint are stored.""" + strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + assert strategy.source_path == "./test-src" + assert strategy.entrypoint == "main.py:app" + + def test_image_uri_is_none(self) -> None: + """Test that image_uri is always None (direct code deploy doesn't produce images).""" + strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + assert strategy.image_uri is None + + def test_package_uri_is_none_before_launch(self) -> None: + """Test that package_uri is None before launch.""" + strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + assert strategy.package_uri is None + + @patch("shutil.which") + def test_validate_prerequisites_with_zip(self, mock_which: MagicMock) -> None: + """Test validate_prerequisites passes with zip available.""" + mock_which.return_value = "/usr/bin/zip" + + strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + strategy.validate_prerequisites() # Should not raise + + @patch("shutil.which") + def test_validate_prerequisites_without_zip(self, mock_which: MagicMock) -> None: + """Test validate_prerequisites fails without zip.""" + mock_which.return_value = None + + strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + with pytest.raises(RuntimeError, match="zip utility not found"): + strategy.validate_prerequisites() + + def test_create_code_package(self) -> None: + """Test _create_code_package creates proper zip.""" + 
strategy = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + + with tempfile.TemporaryDirectory() as temp_dir: + # Create test source files + source_dir = Path(temp_dir) / "source" + source_dir.mkdir() + (source_dir / "main.py").write_text("print('hello')") + (source_dir / "requirements.txt").write_text("boto3") + + # Create package + output_path = Path(temp_dir) / "output.zip" + strategy._create_code_package(str(source_dir), str(output_path)) + + assert output_path.exists() + + +class TestBuildAbstractClass: + """Tests for Build abstract class.""" + + def test_cannot_instantiate(self) -> None: + """Test that Build cannot be instantiated directly.""" + with pytest.raises(TypeError): + Build() # type: ignore + + def test_subclass_must_implement_methods(self) -> None: + """Test that subclass must implement abstract methods.""" + + class IncompleteBuild(Build): + pass + + with pytest.raises(TypeError): + IncompleteBuild() # type: ignore + + +class TestAgentWithBuildStrategy: + """Tests for Agent integration with Build strategies.""" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_agent_with_ecr_prebuilt(self, mock_boto3: MagicMock) -> None: + """Test Agent with ECR pre-built image.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + from bedrock_agentcore.runtime import Agent + + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent( + name="test-agent", + build=build, + ) + + assert agent.build_strategy is build + assert agent.image_uri == "123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest" + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_agent_with_ecr_codebuild(self, mock_boto3: MagicMock) -> None: + """Test Agent with ECR CodeBuild.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + from bedrock_agentcore.runtime import Agent + + build = ECR(source_path="./test-src", entrypoint="main.py:app") + agent = Agent( + name="test-agent", + build=build, + ) + + assert agent.build_strategy is build + assert agent.image_uri is None # Not yet built + + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_agent_with_direct_code_deploy(self, mock_boto3: MagicMock) -> None: + """Test Agent accepts DirectCodeDeploy.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + from bedrock_agentcore.runtime import Agent + + build = DirectCodeDeploy(source_path="./test-src", entrypoint="main.py:app") + agent = Agent( + name="test-agent", + build=build, + ) + + assert agent.build_strategy is build diff --git a/tests/bedrock_agentcore/runtime/test_builder.py b/tests/bedrock_agentcore/runtime/test_builder.py new file mode 100644 index 00000000..85fc5f7f --- /dev/null +++ b/tests/bedrock_agentcore/runtime/test_builder.py @@ -0,0 +1,213 @@ +"""Tests for Docker build operations.""" + +import os +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from bedrock_agentcore.runtime.builder import ( + build_and_push, + generate_dockerfile, + get_codebuild_client, + get_s3_client, +) + + +class TestGetClients: + """Tests for client factory functions.""" + + @patch("bedrock_agentcore.runtime.builder.boto3") + def test_get_codebuild_client(self, mock_boto3: MagicMock) -> None: + """Test CodeBuild client creation.""" + get_codebuild_client("us-west-2") + mock_boto3.client.assert_called_once() + call_kwargs = mock_boto3.client.call_args + assert call_kwargs[0][0] == "codebuild" + assert call_kwargs[1]["region_name"] == 
"us-west-2" + + @patch("bedrock_agentcore.runtime.builder.boto3") + def test_get_s3_client(self, mock_boto3: MagicMock) -> None: + """Test S3 client creation.""" + get_s3_client("us-west-2") + mock_boto3.client.assert_called_once() + call_kwargs = mock_boto3.client.call_args + assert call_kwargs[0][0] == "s3" + assert call_kwargs[1]["region_name"] == "us-west-2" + + +class TestGenerateDockerfile: + """Tests for generate_dockerfile.""" + + def test_generates_dockerfile(self) -> None: + """Test Dockerfile generation with entrypoint.""" + with tempfile.TemporaryDirectory() as temp_dir: + result = generate_dockerfile(temp_dir, "agent.py:app") + + # Verify file was created + dockerfile_path = os.path.join(temp_dir, "Dockerfile") + assert os.path.exists(dockerfile_path) + assert result == dockerfile_path + + # Verify content + with open(dockerfile_path, "r") as f: + content = f.read() + assert "FROM python:3.12-slim" in content + assert 'CMD ["python", "-m", "agent"]' in content + + def test_generates_dockerfile_module_only(self) -> None: + """Test Dockerfile generation with module name only.""" + with tempfile.TemporaryDirectory() as temp_dir: + result = generate_dockerfile(temp_dir, "myagent") + + with open(result, "r") as f: + content = f.read() + assert 'CMD ["python", "-m", "myagent"]' in content + + def test_generates_dockerfile_with_py_extension(self) -> None: + """Test Dockerfile generation with .py extension.""" + with tempfile.TemporaryDirectory() as temp_dir: + result = generate_dockerfile(temp_dir, "agent.py") + + with open(result, "r") as f: + content = f.read() + assert 'CMD ["python", "-m", "agent"]' in content + + def test_generates_dockerfile_custom_output(self) -> None: + """Test Dockerfile generation with custom output path.""" + with tempfile.TemporaryDirectory() as temp_dir: + custom_path = os.path.join(temp_dir, "custom", "Dockerfile.custom") + os.makedirs(os.path.dirname(custom_path), exist_ok=True) + + result = generate_dockerfile(temp_dir, "agent.py:app", output_path=custom_path) + + assert result == custom_path + assert os.path.exists(custom_path) + + +class TestBuildAndPush: + """Tests for build_and_push.""" + + def test_source_path_not_found(self) -> None: + """Test that FileNotFoundError is raised for missing source.""" + with pytest.raises(FileNotFoundError, match="Source path not found"): + build_and_push( + source_path="/nonexistent/path", + agent_name="test-agent", + entrypoint="agent.py:app", + region_name="us-west-2", + ) + + @patch("bedrock_agentcore.runtime.builder.build_image_codebuild") + @patch("bedrock_agentcore.runtime.builder.ensure_ecr_repository") + @patch("bedrock_agentcore.runtime.builder.generate_dockerfile") + def test_build_and_push_success( + self, + mock_generate: MagicMock, + mock_ensure_ecr: MagicMock, + mock_codebuild: MagicMock, + ) -> None: + """Test successful build and push.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create a minimal source directory + Path(temp_dir).joinpath("agent.py").touch() + + mock_ensure_ecr.return_value = { + "repositoryUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/test-agent", + "created": True, + } + + mock_codebuild.return_value = { + "buildId": "build-123", + "imageUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/test-agent:latest", + "status": "SUCCEEDED", + } + + result = build_and_push( + source_path=temp_dir, + agent_name="test-agent", + entrypoint="agent.py:app", + region_name="us-west-2", + ) + + assert result["status"] == "SUCCEEDED" + assert 
"imageUri" in result + + # Verify ECR repository was created with auto-generated name + mock_ensure_ecr.assert_called_once_with( + "bedrock-agentcore/test-agent", "us-west-2" + ) + + @patch("bedrock_agentcore.runtime.builder.build_image_codebuild") + @patch("bedrock_agentcore.runtime.builder.ensure_ecr_repository") + def test_build_and_push_with_existing_dockerfile( + self, + mock_ensure_ecr: MagicMock, + mock_codebuild: MagicMock, + ) -> None: + """Test build and push with existing Dockerfile.""" + with tempfile.TemporaryDirectory() as temp_dir: + # Create source with existing Dockerfile + Path(temp_dir).joinpath("agent.py").touch() + dockerfile = Path(temp_dir).joinpath("Dockerfile") + dockerfile.write_text("FROM python:3.12\nCMD ['python', 'custom.py']") + + mock_ensure_ecr.return_value = { + "repositoryUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/test-agent", + "created": False, + } + + mock_codebuild.return_value = { + "buildId": "build-456", + "imageUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/test-agent:v1.0", + "status": "SUCCEEDED", + } + + result = build_and_push( + source_path=temp_dir, + agent_name="test-agent", + entrypoint="custom.py", + region_name="us-west-2", + tag="v1.0", + ) + + assert result["status"] == "SUCCEEDED" + + # Verify CodeBuild was called with correct tag + mock_codebuild.assert_called_once() + call_kwargs = mock_codebuild.call_args[1] + assert call_kwargs["tag"] == "v1.0" + + @patch("bedrock_agentcore.runtime.builder.build_image_codebuild") + @patch("bedrock_agentcore.runtime.builder.ensure_ecr_repository") + def test_build_and_push_wait_false( + self, + mock_ensure_ecr: MagicMock, + mock_codebuild: MagicMock, + ) -> None: + """Test build and push without waiting.""" + with tempfile.TemporaryDirectory() as temp_dir: + Path(temp_dir).joinpath("agent.py").touch() + Path(temp_dir).joinpath("Dockerfile").touch() + + mock_ensure_ecr.return_value = { + "repositoryUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/test-agent", + } + + mock_codebuild.return_value = { + "buildId": "build-789", + "imageUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/bedrock-agentcore/test-agent:latest", + "status": "IN_PROGRESS", + } + + build_and_push( + source_path=temp_dir, + agent_name="test-agent", + entrypoint="agent.py:app", + wait=False, + ) + + # Verify wait=False was passed to CodeBuild + call_kwargs = mock_codebuild.call_args[1] + assert call_kwargs["wait"] is False diff --git a/tests/bedrock_agentcore/runtime/test_config.py b/tests/bedrock_agentcore/runtime/test_config.py new file mode 100644 index 00000000..544c7ade --- /dev/null +++ b/tests/bedrock_agentcore/runtime/test_config.py @@ -0,0 +1,147 @@ +"""Tests for runtime configuration models.""" + +import pytest + +from bedrock_agentcore.runtime.config import ( + NetworkConfigurationModel, + NetworkMode, + RuntimeArtifactModel, + RuntimeConfigModel, + RuntimeStatus, + VpcConfigModel, +) + + +class TestRuntimeStatus: + """Tests for RuntimeStatus enum.""" + + def test_status_values(self) -> None: + """Test that all expected status values exist.""" + assert RuntimeStatus.CREATING == "CREATING" + assert RuntimeStatus.ACTIVE == "ACTIVE" + assert RuntimeStatus.UPDATING == "UPDATING" + assert RuntimeStatus.DELETING == "DELETING" + assert RuntimeStatus.FAILED == "FAILED" + assert RuntimeStatus.NOT_FOUND == "NOT_FOUND" + + +class TestNetworkMode: + """Tests for NetworkMode enum.""" + + def test_network_mode_values(self) -> None: + """Test that network mode 
values exist.""" + assert NetworkMode.PUBLIC == "PUBLIC" + assert NetworkMode.VPC == "VPC" + + +class TestVpcConfigModel: + """Tests for VpcConfigModel.""" + + def test_create_with_alias(self) -> None: + """Test creating VpcConfigModel with alias names.""" + config = VpcConfigModel( + securityGroups=["sg-123", "sg-456"], + subnets=["subnet-abc", "subnet-def"], + ) + assert config.security_groups == ["sg-123", "sg-456"] + assert config.subnets == ["subnet-abc", "subnet-def"] + + def test_dump_by_alias(self) -> None: + """Test dumping config with alias names.""" + config = VpcConfigModel( + securityGroups=["sg-123"], + subnets=["subnet-abc"], + ) + data = config.model_dump(by_alias=True) + assert data["securityGroups"] == ["sg-123"] + assert data["subnets"] == ["subnet-abc"] + + +class TestNetworkConfigurationModel: + """Tests for NetworkConfigurationModel.""" + + def test_default_public_mode(self) -> None: + """Test default network mode is PUBLIC.""" + config = NetworkConfigurationModel() + assert config.network_mode == NetworkMode.PUBLIC + assert config.vpc_config is None + + def test_vpc_mode(self) -> None: + """Test VPC network mode.""" + vpc = VpcConfigModel( + securityGroups=["sg-123"], + subnets=["subnet-abc"], + ) + config = NetworkConfigurationModel( + networkMode=NetworkMode.VPC, + vpcConfig=vpc, + ) + assert config.network_mode == NetworkMode.VPC + assert config.vpc_config is not None + + +class TestRuntimeArtifactModel: + """Tests for RuntimeArtifactModel.""" + + def test_create(self) -> None: + """Test creating RuntimeArtifactModel.""" + artifact = RuntimeArtifactModel(imageUri="123456789.dkr.ecr.us-west-2.amazonaws.com/test:latest") + assert artifact.image_uri == "123456789.dkr.ecr.us-west-2.amazonaws.com/test:latest" + + def test_dump_by_alias(self) -> None: + """Test dumping with alias.""" + artifact = RuntimeArtifactModel(imageUri="test:latest") + data = artifact.model_dump(by_alias=True) + assert data["imageUri"] == "test:latest" + + +class TestRuntimeConfigModel: + """Tests for RuntimeConfigModel.""" + + def test_minimal_config(self) -> None: + """Test minimal config with just name.""" + config = RuntimeConfigModel(name="test-agent") + assert config.name == "test-agent" + assert config.description is None + assert config.artifact is None + assert config.network_configuration is None + assert config.environment_variables is None + assert config.tags is None + + def test_full_config(self) -> None: + """Test full config with all fields.""" + config = RuntimeConfigModel( + name="test-agent", + description="Test agent description", + artifact=RuntimeArtifactModel(imageUri="test:latest"), + networkConfiguration=NetworkConfigurationModel(networkMode=NetworkMode.PUBLIC), + environmentVariables={"LOG_LEVEL": "INFO"}, + tags={"Environment": "test"}, + ) + assert config.name == "test-agent" + assert config.description == "Test agent description" + assert config.artifact is not None + assert config.artifact.image_uri == "test:latest" + assert config.network_configuration is not None + assert config.environment_variables == {"LOG_LEVEL": "INFO"} + assert config.tags == {"Environment": "test"} + + def test_dump_excludes_none(self) -> None: + """Test that dump excludes None values.""" + config = RuntimeConfigModel(name="test-agent") + data = config.model_dump(by_alias=True, exclude_none=True) + assert "name" in data + assert "description" not in data + assert "artifact" not in data + + def test_validate_from_dict(self) -> None: + """Test validating from dict (YAML-like structure).""" + 
data = { + "name": "test-agent", + "artifact": {"imageUri": "test:latest"}, + "networkConfiguration": {"networkMode": "PUBLIC"}, + } + config = RuntimeConfigModel.model_validate(data) + assert config.name == "test-agent" + assert config.artifact is not None + assert config.artifact.image_uri == "test:latest" diff --git a/tests/bedrock_agentcore/runtime/test_ecr.py b/tests/bedrock_agentcore/runtime/test_ecr.py new file mode 100644 index 00000000..64ac4ba0 --- /dev/null +++ b/tests/bedrock_agentcore/runtime/test_ecr.py @@ -0,0 +1,179 @@ +"""Tests for ECR operations.""" + +from unittest.mock import MagicMock, patch + +from botocore.exceptions import ClientError + +from bedrock_agentcore.runtime.ecr import ( + build_ecr_uri, + delete_ecr_repository, + ensure_ecr_repository, + get_account_id, + get_ecr_client, + get_ecr_login_credentials, +) + + +class TestGetEcrClient: + """Tests for get_ecr_client.""" + + @patch("bedrock_agentcore.runtime.ecr.boto3") + def test_creates_client_with_region(self, mock_boto3: MagicMock) -> None: + """Test that client is created with specified region.""" + get_ecr_client("us-west-2") + mock_boto3.client.assert_called_once() + call_kwargs = mock_boto3.client.call_args + assert call_kwargs[0][0] == "ecr" + assert call_kwargs[1]["region_name"] == "us-west-2" + + +class TestEnsureEcrRepository: + """Tests for ensure_ecr_repository.""" + + @patch("bedrock_agentcore.runtime.ecr.get_ecr_client") + def test_repository_exists(self, mock_get_client: MagicMock) -> None: + """Test when repository already exists.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_client.describe_repositories.return_value = { + "repositories": [ + { + "repositoryName": "test-repo", + "repositoryUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/test-repo", + "repositoryArn": "arn:aws:ecr:us-west-2:123456789012:repository/test-repo", + } + ] + } + + result = ensure_ecr_repository("test-repo", "us-west-2") + + assert result["repositoryName"] == "test-repo" + assert result["created"] is False + mock_client.create_repository.assert_not_called() + + @patch("bedrock_agentcore.runtime.ecr.get_ecr_client") + def test_repository_created(self, mock_get_client: MagicMock) -> None: + """Test when repository needs to be created.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + # First call raises RepositoryNotFoundException + error_response = {"Error": {"Code": "RepositoryNotFoundException"}} + mock_client.describe_repositories.side_effect = ClientError(error_response, "DescribeRepositories") + + mock_client.create_repository.return_value = { + "repository": { + "repositoryName": "test-repo", + "repositoryUri": "123456789012.dkr.ecr.us-west-2.amazonaws.com/test-repo", + "repositoryArn": "arn:aws:ecr:us-west-2:123456789012:repository/test-repo", + } + } + + result = ensure_ecr_repository("test-repo", "us-west-2") + + assert result["repositoryName"] == "test-repo" + assert result["created"] is True + mock_client.create_repository.assert_called_once() + + +class TestDeleteEcrRepository: + """Tests for delete_ecr_repository.""" + + @patch("bedrock_agentcore.runtime.ecr.get_ecr_client") + def test_delete_success(self, mock_get_client: MagicMock) -> None: + """Test successful repository deletion.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + result = delete_ecr_repository("test-repo", "us-west-2") + + assert result["status"] == "DELETED" + assert result["repositoryName"] == "test-repo" + 
mock_client.delete_repository.assert_called_once_with(repositoryName="test-repo", force=False) + + @patch("bedrock_agentcore.runtime.ecr.get_ecr_client") + def test_delete_not_found(self, mock_get_client: MagicMock) -> None: + """Test deletion when repository not found.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + error_response = {"Error": {"Code": "RepositoryNotFoundException"}} + mock_client.delete_repository.side_effect = ClientError(error_response, "DeleteRepository") + + result = delete_ecr_repository("test-repo", "us-west-2") + + assert result["status"] == "NOT_FOUND" + + +class TestGetAccountId: + """Tests for get_account_id.""" + + @patch("bedrock_agentcore.runtime.ecr.boto3") + def test_returns_account_id(self, mock_boto3: MagicMock) -> None: + """Test that account ID is returned.""" + mock_sts = MagicMock() + mock_boto3.client.return_value = mock_sts + mock_sts.get_caller_identity.return_value = {"Account": "123456789012"} + + result = get_account_id() + + assert result == "123456789012" + + +class TestBuildEcrUri: + """Tests for build_ecr_uri.""" + + @patch("bedrock_agentcore.runtime.ecr.get_account_id") + @patch("bedrock_agentcore.runtime.ecr.boto3") + def test_build_uri(self, mock_boto3: MagicMock, mock_get_account: MagicMock) -> None: + """Test building ECR URI.""" + mock_get_account.return_value = "123456789012" + mock_session = MagicMock() + mock_session.region_name = "us-west-2" + mock_boto3.Session.return_value = mock_session + + result = build_ecr_uri("my-repo", "us-west-2", "v1.0") + + assert result == "123456789012.dkr.ecr.us-west-2.amazonaws.com/my-repo:v1.0" + + @patch("bedrock_agentcore.runtime.ecr.get_account_id") + @patch("bedrock_agentcore.runtime.ecr.boto3") + def test_build_uri_default_tag(self, mock_boto3: MagicMock, mock_get_account: MagicMock) -> None: + """Test building ECR URI with default tag.""" + mock_get_account.return_value = "123456789012" + mock_session = MagicMock() + mock_session.region_name = "us-east-1" + mock_boto3.Session.return_value = mock_session + + result = build_ecr_uri("my-repo", "us-east-1") + + assert result == "123456789012.dkr.ecr.us-east-1.amazonaws.com/my-repo:latest" + + +class TestGetEcrLoginCredentials: + """Tests for get_ecr_login_credentials.""" + + @patch("bedrock_agentcore.runtime.ecr.get_ecr_client") + def test_returns_credentials(self, mock_get_client: MagicMock) -> None: + """Test that login credentials are returned.""" + import base64 + + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + # Base64 encode "AWS:secret-password" + token = base64.b64encode(b"AWS:secret-password").decode("utf-8") + mock_client.get_authorization_token.return_value = { + "authorizationData": [ + { + "authorizationToken": token, + "proxyEndpoint": "https://123456789012.dkr.ecr.us-west-2.amazonaws.com", + } + ] + } + + username, password, registry = get_ecr_login_credentials("us-west-2") + + assert username == "AWS" + assert password == "secret-password" + assert registry == "https://123456789012.dkr.ecr.us-west-2.amazonaws.com" diff --git a/tests/bedrock_agentcore/runtime/test_iam.py b/tests/bedrock_agentcore/runtime/test_iam.py new file mode 100644 index 00000000..ca6f7698 --- /dev/null +++ b/tests/bedrock_agentcore/runtime/test_iam.py @@ -0,0 +1,184 @@ +"""Tests for IAM operations.""" + +from unittest.mock import MagicMock, patch + +from botocore.exceptions import ClientError + +from bedrock_agentcore.runtime.iam import ( + delete_role, + get_iam_client, + 
get_or_create_codebuild_execution_role, + get_or_create_runtime_execution_role, +) + + +class TestGetIamClient: + """Tests for get_iam_client.""" + + @patch("bedrock_agentcore.runtime.iam.boto3") + def test_creates_client_with_region(self, mock_boto3: MagicMock) -> None: + """Test that client is created with specified region.""" + get_iam_client("us-west-2") + mock_boto3.client.assert_called_once() + call_kwargs = mock_boto3.client.call_args + assert call_kwargs[0][0] == "iam" + assert call_kwargs[1]["region_name"] == "us-west-2" + + +class TestGetOrCreateRuntimeExecutionRole: + """Tests for get_or_create_runtime_execution_role.""" + + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_role_exists(self, mock_get_client: MagicMock) -> None: + """Test when role already exists.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_client.get_role.return_value = { + "Role": { + "RoleName": "bedrock-agentcore-runtime-abc123", + "Arn": "arn:aws:iam::123456789012:role/bedrock-agentcore-runtime-abc123", + } + } + + result = get_or_create_runtime_execution_role("test-agent", "us-west-2") + + assert "roleArn" in result + assert result["created"] is False + mock_client.create_role.assert_not_called() + + @patch("bedrock_agentcore.runtime.iam.boto3") + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_role_created(self, mock_get_client: MagicMock, mock_boto3: MagicMock) -> None: + """Test when role needs to be created.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + # Mock STS client for account ID + mock_sts = MagicMock() + mock_boto3.client.return_value = mock_sts + mock_sts.get_caller_identity.return_value = {"Account": "123456789012"} + mock_boto3.Session.return_value.region_name = "us-west-2" + + # First call raises NoSuchEntityException + error_response = {"Error": {"Code": "NoSuchEntity"}} + mock_client.get_role.side_effect = ClientError(error_response, "GetRole") + + mock_client.create_role.return_value = { + "Role": { + "RoleName": "bedrock-agentcore-runtime-abc123", + "Arn": "arn:aws:iam::123456789012:role/bedrock-agentcore-runtime-abc123", + } + } + + result = get_or_create_runtime_execution_role("test-agent", "us-west-2") + + assert "roleArn" in result + assert result["created"] is True + mock_client.create_role.assert_called_once() + # Uses put_role_policy for inline policy, not attach_role_policy + mock_client.put_role_policy.assert_called_once() + + +class TestGetOrCreateCodeBuildExecutionRole: + """Tests for get_or_create_codebuild_execution_role.""" + + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_role_exists(self, mock_get_client: MagicMock) -> None: + """Test when role already exists.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + mock_client.get_role.return_value = { + "Role": { + "RoleName": "bedrock-agentcore-codebuild-abc123", + "Arn": "arn:aws:iam::123456789012:role/bedrock-agentcore-codebuild-abc123", + } + } + + result = get_or_create_codebuild_execution_role( + "test-agent", + "arn:aws:ecr:us-west-2:123456789012:repository/test-repo", + "us-west-2", + "test-bucket", + ) + + assert "roleArn" in result + assert result["created"] is False + mock_client.create_role.assert_not_called() + + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_role_created(self, mock_get_client: MagicMock) -> None: + """Test when role needs to be created.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + # First 
call raises NoSuchEntityException + error_response = {"Error": {"Code": "NoSuchEntity"}} + mock_client.get_role.side_effect = ClientError(error_response, "GetRole") + + mock_client.create_role.return_value = { + "Role": { + "RoleName": "bedrock-agentcore-codebuild-abc123", + "Arn": "arn:aws:iam::123456789012:role/bedrock-agentcore-codebuild-abc123", + } + } + + result = get_or_create_codebuild_execution_role( + "test-agent", + "arn:aws:ecr:us-west-2:123456789012:repository/test-repo", + "us-west-2", + "test-bucket", + ) + + assert "roleArn" in result + assert result["created"] is True + mock_client.create_role.assert_called_once() + mock_client.put_role_policy.assert_called_once() + + +class TestDeleteRole: + """Tests for delete_role.""" + + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_delete_success(self, mock_get_client: MagicMock) -> None: + """Test successful role deletion.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + # Mock list_role_policies (inline policies) + mock_client.list_role_policies.return_value = {"PolicyNames": ["InlinePolicy"]} + + result = delete_role("test-role", "us-west-2") + + assert result["status"] == "DELETED" + assert result["roleName"] == "test-role" + mock_client.list_role_policies.assert_called_once() + mock_client.delete_role_policy.assert_called_once() + mock_client.delete_role.assert_called_once() + + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_delete_no_inline_policies(self, mock_get_client: MagicMock) -> None: + """Test deletion when role has no inline policies.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + # No inline policies + mock_client.list_role_policies.return_value = {"PolicyNames": []} + + result = delete_role("test-role", "us-west-2") + + assert result["status"] == "DELETED" + mock_client.delete_role_policy.assert_not_called() + mock_client.delete_role.assert_called_once() + + @patch("bedrock_agentcore.runtime.iam.get_iam_client") + def test_delete_not_found(self, mock_get_client: MagicMock) -> None: + """Test deletion when role not found.""" + mock_client = MagicMock() + mock_get_client.return_value = mock_client + + error_response = {"Error": {"Code": "NoSuchEntity"}} + mock_client.list_role_policies.side_effect = ClientError(error_response, "ListRolePolicies") + + result = delete_role("test-role", "us-west-2") + + assert result["status"] == "NOT_FOUND" diff --git a/tests/bedrock_agentcore/test_init.py b/tests/bedrock_agentcore/test_init.py index f757719c..aa093b9c 100644 --- a/tests/bedrock_agentcore/test_init.py +++ b/tests/bedrock_agentcore/test_init.py @@ -23,8 +23,9 @@ def test_all_exports(): # Test __all__ contains expected items expected_all = [ "BedrockAgentCoreApp", + "PingStatus", + "Project", "RequestContext", "BedrockAgentCoreContext", - "PingStatus", ] assert sorted(bedrock_agentcore.__all__) == sorted(expected_all) diff --git a/tests/bedrock_agentcore/test_project.py b/tests/bedrock_agentcore/test_project.py new file mode 100644 index 00000000..888902d0 --- /dev/null +++ b/tests/bedrock_agentcore/test_project.py @@ -0,0 +1,279 @@ +"""Tests for Project class.""" + +import json +import tempfile +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from bedrock_agentcore import Project +from bedrock_agentcore.memory import Memory +from bedrock_agentcore.runtime import Agent +from bedrock_agentcore.runtime.build import DirectCodeDeploy, ECR + + +class TestProjectInit: + """Tests for 
Project initialization.""" + + @patch("bedrock_agentcore.project.boto3") + def test_basic_init(self, mock_boto3: MagicMock) -> None: + """Test basic project initialization.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + + assert project.name == "test-project" + assert project.region == "us-west-2" + assert project.agents == [] + assert project.memories == [] + + @patch("bedrock_agentcore.project.boto3") + def test_init_with_all_params(self, mock_boto3: MagicMock) -> None: + """Test project initialization with all parameters.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + project = Project( + name="test-project", + version="1.0.0", + description="Test description", + region="us-east-1", + ) + + assert project.name == "test-project" + assert project.version == "1.0.0" + assert project.description == "Test description" + assert project.region == "us-east-1" + + +class TestProjectFromJson: + """Tests for Project.from_json().""" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + @patch("bedrock_agentcore.memory.client.boto3") + def test_from_json_minimal( + self, mock_mem_boto3: MagicMock, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock + ) -> None: + """Test loading minimal project from JSON.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + mock_mem_boto3.Session.return_value.region_name = "us-west-2" + + config = { + "name": "test-project", + "agents": [ + { + "name": "test-agent", + "runtime": { + "artifact": "CodeZip", + "entrypoint": "main.py:handler", + "codeLocation": "./src", + }, + } + ], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(config, f) + f.flush() + + project = Project.from_json(f.name) + + assert project.name == "test-project" + assert len(project.agents) == 1 + assert project.agents[0].name == "test-agent" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + @patch("bedrock_agentcore.memory.client.boto3") + def test_from_json_with_memory( + self, mock_mem_boto3: MagicMock, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock + ) -> None: + """Test loading project with memory providers from JSON.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + mock_mem_boto3.Session.return_value.region_name = "us-west-2" + + config = { + "name": "test-project", + "agents": [ + { + "name": "test-agent", + "runtime": { + "entrypoint": "main.py:handler", + "codeLocation": "./src", + }, + "memoryProviders": [ + { + "type": "AgentCoreMemory", + "relation": "own", + "name": "test-memory", + "memoryStrategies": [{"type": "SEMANTIC"}], + } + ], + } + ], + } + + with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as f: + json.dump(config, f) + f.flush() + + project = Project.from_json(f.name) + + assert len(project.memories) == 1 + assert project.memories[0].name == "test-memory" + + def test_from_json_file_not_found(self) -> None: + """Test from_json raises FileNotFoundError for missing file.""" + with pytest.raises(FileNotFoundError): + Project.from_json("/nonexistent/file.json") + + +class TestProjectResourceManagement: + """Tests for resource management methods.""" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + def 
test_add_agent(self, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock) -> None: + """Test adding an agent to project.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent(name="test-agent", build=build) + + result = project.add_agent(agent) + + assert result is project # Method chaining + assert len(project.agents) == 1 + assert project.agents[0].name == "test-agent" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.memory.client.boto3") + def test_add_memory(self, mock_mem_boto3: MagicMock, mock_proj_boto3: MagicMock) -> None: + """Test adding a memory to project.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_mem_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + memory = Memory(name="test-memory") + + result = project.add_memory(memory) + + assert result is project # Method chaining + assert len(project.memories) == 1 + assert project.memories[0].name == "test-memory" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_get_agent(self, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock) -> None: + """Test getting an agent by name.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent(name="test-agent", build=build) + project.add_agent(agent) + + result = project.get_agent("test-agent") + assert result is agent + + @patch("bedrock_agentcore.project.boto3") + def test_get_agent_not_found(self, mock_boto3: MagicMock) -> None: + """Test get_agent raises KeyError for missing agent.""" + mock_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + + with pytest.raises(KeyError, match="Agent not found"): + project.get_agent("nonexistent") + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + def test_remove_agent(self, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock) -> None: + """Test removing an agent from project.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent(name="test-agent", build=build) + project.add_agent(agent) + + result = project.remove_agent("test-agent") + + assert result is project # Method chaining + assert len(project.agents) == 0 + + +class TestProjectSave: + """Tests for Project.save().""" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + @patch("bedrock_agentcore.memory.client.boto3") + def test_save_to_json( + self, mock_mem_boto3: MagicMock, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock + ) -> None: + """Test saving project to JSON file.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + mock_mem_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project", version="1.0.0") + 
build = DirectCodeDeploy(source_path="./src", entrypoint="main.py:handler") + agent = Agent(name="test-agent", build=build) + memory = Memory( + name="test-memory", strategies=[{"type": "SEMANTIC", "namespace": "facts/{sessionId}/"}] + ) + project.add_agent(agent) + project.add_memory(memory) + + with tempfile.TemporaryDirectory() as temp_dir: + output_path = Path(temp_dir) / "agentcore.json" + result = project.save(str(output_path)) + + assert Path(result).exists() + + with open(result) as f: + saved_config = json.load(f) + + assert saved_config["name"] == "test-project" + assert saved_config["version"] == "1.0.0" + assert len(saved_config["agents"]) == 1 + assert saved_config["agents"][0]["name"] == "test-agent" + + +class TestProjectStatus: + """Tests for Project.status().""" + + @patch("bedrock_agentcore.project.boto3") + @patch("bedrock_agentcore.runtime.agent.boto3") + @patch("bedrock_agentcore.memory.client.boto3") + def test_status( + self, mock_mem_boto3: MagicMock, mock_agent_boto3: MagicMock, mock_proj_boto3: MagicMock + ) -> None: + """Test getting status of all resources.""" + mock_proj_boto3.Session.return_value.region_name = "us-west-2" + mock_agent_boto3.Session.return_value.region_name = "us-west-2" + mock_mem_boto3.Session.return_value.region_name = "us-west-2" + + project = Project(name="test-project") + build = ECR(image_uri="123456789012.dkr.ecr.us-west-2.amazonaws.com/test:latest") + agent = Agent(name="test-agent", build=build) + memory = Memory(name="test-memory") + project.add_agent(agent) + project.add_memory(memory) + + status = project.status() + + assert "agents" in status + assert "memories" in status + assert "test-agent" in status["agents"] + assert "test-memory" in status["memories"] + assert status["agents"]["test-agent"]["deployed"] is False + assert status["memories"]["test-memory"]["active"] is False diff --git a/uv.lock b/uv.lock index 511e4d2a..4cb7986b 100644 --- a/uv.lock +++ b/uv.lock @@ -228,6 +228,7 @@ dependencies = [ { name = "boto3" }, { name = "botocore" }, { name = "pydantic" }, + { name = "pyyaml" }, { name = "starlette" }, { name = "typing-extensions" }, { name = "urllib3" }, @@ -264,6 +265,7 @@ requires-dist = [ { name = "boto3", specifier = ">=1.40.52" }, { name = "botocore", specifier = ">=1.40.52" }, { name = "pydantic", specifier = ">=2.0.0,<2.41.3" }, + { name = "pyyaml", specifier = ">=6.0" }, { name = "starlette", specifier = ">=0.46.2" }, { name = "strands-agents", marker = "extra == 'strands-agents'", specifier = ">=1.1.0" }, { name = "strands-agents-evals", marker = "extra == 'strands-agents-evals'", specifier = ">=0.1.0" },