From 660f49c87947afdf9a1bbf95b381a808d98f7445 Mon Sep 17 00:00:00 2001
From: Soham Kukreti
Date: Thu, 30 Apr 2026 22:13:54 +0530
Subject: [PATCH] fix: add bedrock to PROVIDER_MODELS_PREFIXES so AWS credential auth works

LLMConfig.__init__ checks PROVIDER_MODELS_PREFIXES when api_token=None.
If the provider prefix isn't found, it silently falls through to the
else branch and overwrites self.provider with DEFAULT_PROVIDER
(openai/gpt-4o), meaning any bedrock/* model string was being replaced
before the LLM call was even made.

This broke supported Bedrock auth methods when api_token is not passed
in the LLMConfig. Only passing api_token= explicitly worked, because the
truthy api_token bypassed the prefix check entirely.

Adding "bedrock": None to PROVIDER_MODELS_PREFIXES keeps self.provider
intact so the correct Bedrock provider is used. The actual auth (SigV4
signing or Bearer header) is handled downstream based on what
credentials are available in the environment.
---
 crawl4ai/config.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/crawl4ai/config.py b/crawl4ai/config.py
index 507965af6..5d394136f 100644
--- a/crawl4ai/config.py
+++ b/crawl4ai/config.py
@@ -36,6 +36,7 @@
     "anthropic": os.getenv("ANTHROPIC_API_KEY"),
     "gemini": os.getenv("GEMINI_API_KEY"),
     "deepseek": os.getenv("DEEPSEEK_API_KEY"),
+    "bedrock": None,  # Bedrock uses AWS credential chain (SigV4) or explicit api_token for bearer auth
 }

 # Chunk token threshold