Skip to content

Commit 58a891f

Browse files
committed
feat(cache): cache last 3 messages for anthropic auto mode
1 parent 3ef976f commit 58a891f

File tree

2 files changed

+6
-5
lines changed

2 files changed

+6
-5
lines changed

src/mcp_agent/config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ class AnthropicSettings(BaseModel):
118118
Controls how caching is applied for Anthropic models when prompt_caching is enabled globally.
119119
- "off": No caching, even if global prompt_caching is true.
120120
- "prompt": Caches the initial system/user prompt. Useful for large, static prompts.
121-
- "auto": Caches the last user message. Default behavior if prompt_caching is true.
121+
- "auto": Caches the last three messages. Default behavior if prompt_caching is true.
122122
"""
123123

124124
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)

src/mcp_agent/llm/providers/augmented_llm_anthropic.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -130,11 +130,12 @@ async def _anthropic_completion(
130130
if cache_mode == "auto":
131131
apply_cache_to_system_prompt = True # Cache system prompt
132132
if messages: # If there are any messages
133-
messages_to_cache_indices.append(
134-
len(messages) - 1
135-
) # Cache only the last message
133+
# Cache the last 3 messages
134+
messages_to_cache_indices.extend(
135+
range(max(0, len(messages) - 3), len(messages))
136+
)
136137
self.logger.debug(
137-
f"Auto mode: Caching system prompt (if present) and last message at index: {messages_to_cache_indices}"
138+
f"Auto mode: Caching system prompt (if present) and last three messages at indices: {messages_to_cache_indices}"
138139
)
139140
elif cache_mode == "prompt":
140141
# Find the first user message in the fully constructed messages list

0 commit comments

Comments (0)