Skip to content

Commit 4b7c709

Browse files
theblazehen and claude committed
Add logging to thinking signature cache for observability
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent a35b586 commit 4b7c709

1 file changed

Lines changed: 5 additions & 2 deletions

File tree

src/rotator_library/providers/anthropic_provider.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -178,18 +178,20 @@ def _openai_messages_to_anthropic(self, messages: List[Dict[str, Any]]) -> tuple
178178
# the original Anthropic response)
179179
cached = self._retrieve_thinking_blocks(reasoning)
180180
if cached:
181+
lib_logger.info(f"Thinking signature cache HIT – restored {len(cached)} block(s)")
181182
blocks.extend(cached)
182183
else:
183184
# Fallback: inline signature from client (custom clients)
184185
thinking_sig = msg.get("thinking_signature")
185186
if thinking_sig and len(thinking_sig) >= 100:
187+
lib_logger.debug("Using inline thinking signature from client")
186188
blocks.append({
187189
"type": "thinking",
188190
"thinking": reasoning,
189191
"signature": thinking_sig,
190192
})
191-
# else: no signature → drop thinking block,
192-
# model generates fresh thinking (cache miss on prefix)
193+
else:
194+
lib_logger.warning("Thinking signature cache MISS – dropping thinking block")
193195

194196
if isinstance(content, str) and content.strip():
195197
blocks.append({"type": "text", "text": content})
@@ -583,6 +585,7 @@ def _anthropic_event_to_openai_chunks(
583585
full_thinking = "".join(b["thinking"] for b in thinking_blocks)
584586
cache_key = hashlib.sha256(full_thinking.encode()).hexdigest()
585587
_get_thinking_cache().store(cache_key, json.dumps(thinking_blocks))
588+
lib_logger.info(f"Thinking signature cache STORE – {len(thinking_blocks)} block(s), key={cache_key[:12]}...")
586589

587590
return
588591

0 commit comments

Comments (0)