From cdd12764783aaef52e51576f4b564c302fe6bd07 Mon Sep 17 00:00:00 2001
From: Yoon PyungHo
Date: Thu, 23 Oct 2025 19:54:37 +0900
Subject: [PATCH] fix: raise ContextWindowOverflowException when context limit
 exceeded

Handle Anthropic/LLM API errors containing "input length and `max_tokens`
exceed context limit" by explicitly raising ContextWindowOverflowException
instead of a generic RemoteProtocolError (for strands-agent 0.12.0), or
EventLoopException, or anthropic.BadRequestError (for anthropic 0.71.0,
strands-agent 0.13.0).
---
 src/strands/models/anthropic.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/strands/models/anthropic.py b/src/strands/models/anthropic.py
index 48351da19..760776ed5 100644
--- a/src/strands/models/anthropic.py
+++ b/src/strands/models/anthropic.py
@@ -42,6 +42,7 @@ class AnthropicModel(Model):
         "input is too long",
         "input length exceeds context window",
         "input and output tokens exceed your context limit",
+        "input length and `max_tokens` exceed context limit",
     }
 
     class AnthropicConfig(TypedDict, total=False):
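
For context, below is a minimal sketch (not the library's actual implementation) of how an overflow-message set like this is typically consulted: the text of the provider error is matched against each known substring, and a ContextWindowOverflowException is raised instead of letting the provider-specific error propagate. The import path for ContextWindowOverflowException and the helper name raise_if_context_overflow are assumptions for illustration.

import anthropic

# Assumed import path; verify against the installed strands-agents version.
from strands.types.exceptions import ContextWindowOverflowException

# Mirrors the set patched above, including the newly added message.
OVERFLOW_MESSAGES = {
    "input is too long",
    "input length exceeds context window",
    "input and output tokens exceed your context limit",
    "input length and `max_tokens` exceed context limit",
}


def raise_if_context_overflow(error: anthropic.BadRequestError) -> None:
    """Hypothetical helper: re-raise a provider error as a context overflow when its text matches."""
    message = str(error).lower()
    if any(overflow in message for overflow in OVERFLOW_MESSAGES):
        # Surface a typed overflow error so callers can truncate/summarize and retry.
        raise ContextWindowOverflowException(str(error)) from error
    # Otherwise, let the original provider error propagate unchanged.
    raise error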