Commit be9725b

Normalize SDK errors for clients (#980)
Co-authored-by: openhands <openhands@all-hands.dev>
1 parent ed146c9 commit be9725b

12 files changed, +427 −176 lines changed

openhands-sdk/openhands/sdk/agent/agent.py

Lines changed: 8 additions & 8 deletions

@@ -28,7 +28,10 @@
     TextContent,
     ThinkingBlock,
 )
-from openhands.sdk.llm.exceptions import FunctionCallValidationError
+from openhands.sdk.llm.exceptions import (
+    FunctionCallValidationError,
+    LLMContextWindowExceedError,
+)
 from openhands.sdk.logger import get_logger
 from openhands.sdk.security.confirmation_policy import NeverConfirm
 from openhands.sdk.security.llm_analyzer import LLMSecurityAnalyzer
@@ -168,22 +171,19 @@ def step(
             )
             on_event(error_message)
             return
-        except Exception as e:
-            # If there is a condenser registered and the exception is a context window
-            # exceeded, we can recover by triggering a condensation request.
+        except LLMContextWindowExceedError:
+            # If condenser is available and handles requests, trigger condensation
            if (
                self.condenser is not None
                and self.condenser.handles_condensation_requests()
-                and self.llm.is_context_window_exceeded_exception(e)
            ):
                logger.warning(
                    "LLM raised context window exceeded error, triggering condensation"
                )
                on_event(CondensationRequest())
                return
-            # If the error isn't recoverable, keep propagating it up the stack.
-            else:
-                raise e
+            # No condenser available; re-raise for client handling
+            raise
 
         # LLMResponse already contains the converted message and metrics snapshot
         message: Message = llm_response.message
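
With this change, Agent.step only intercepts LLMContextWindowExceedError; every other normalized SDK error now propagates to the caller. Below is a minimal sketch of how a client might handle the re-raised errors — the helper name and the exact step(...) call signature are assumptions for illustration, not part of this commit.

# Hypothetical client-side handler; names and the step() call signature are
# assumptions, not part of this diff.
from openhands.sdk.llm.exceptions import (
    LLMAuthenticationError,
    LLMContextWindowExceedError,
    LLMServiceUnavailableError,
)


def run_step_safely(agent, state, on_event) -> None:
    try:
        agent.step(state, on_event=on_event)
    except LLMContextWindowExceedError:
        # Only reached when no condenser handles condensation requests.
        print("Context window exceeded and no condenser is configured.")
    except LLMAuthenticationError:
        print("Authentication failed; check the configured API key.")
    except LLMServiceUnavailableError:
        print("Provider unreachable or unavailable; retry later.")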

openhands-sdk/openhands/sdk/llm/exceptions.py

Lines changed: 0 additions & 110 deletions
This file was deleted.

Lines changed: 45 additions & 0 deletions

@@ -0,0 +1,45 @@
+from .classifier import is_context_window_exceeded, looks_like_auth_error
+from .mapping import map_provider_exception
+from .types import (
+    FunctionCallConversionError,
+    FunctionCallNotExistsError,
+    FunctionCallValidationError,
+    LLMAuthenticationError,
+    LLMBadRequestError,
+    LLMContextWindowExceedError,
+    LLMError,
+    LLMMalformedActionError,
+    LLMNoActionError,
+    LLMNoResponseError,
+    LLMRateLimitError,
+    LLMResponseError,
+    LLMServiceUnavailableError,
+    LLMTimeoutError,
+    OperationCancelled,
+    UserCancelledError,
+)
+
+
+__all__ = [
+    # Types
+    "LLMError",
+    "LLMMalformedActionError",
+    "LLMNoActionError",
+    "LLMResponseError",
+    "FunctionCallConversionError",
+    "FunctionCallValidationError",
+    "FunctionCallNotExistsError",
+    "LLMNoResponseError",
+    "LLMContextWindowExceedError",
+    "LLMAuthenticationError",
+    "LLMRateLimitError",
+    "LLMTimeoutError",
+    "LLMServiceUnavailableError",
+    "LLMBadRequestError",
+    "UserCancelledError",
+    "OperationCancelled",
+    # Helpers
+    "is_context_window_exceeded",
+    "looks_like_auth_error",
+    "map_provider_exception",
+]
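
This re-export module keeps the flat import path openhands.sdk.llm.exceptions working, which is why the agent.py hunk above imports from it unchanged. A small, purely illustrative sketch:

# The flat import path still resolves through the re-exports shown above.
from openhands.sdk.llm.exceptions import (
    FunctionCallValidationError,
    LLMContextWindowExceedError,
    map_provider_exception,
)

print(LLMContextWindowExceedError.__name__)     # "LLMContextWindowExceedError"
print(map_provider_exception(ValueError("x")))  # unmapped errors come back unchanged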

Lines changed: 49 additions & 0 deletions

@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from litellm.exceptions import BadRequestError, ContextWindowExceededError, OpenAIError
+
+from .types import LLMContextWindowExceedError
+
+
+# Minimal, provider-agnostic context-window detection
+LONG_PROMPT_PATTERNS: list[str] = [
+    "contextwindowexceedederror",
+    "prompt is too long",
+    "input length and `max_tokens` exceed context limit",
+    "please reduce the length of",
+    "the request exceeds the available context size",
+    "context length exceeded",
+]
+
+
+def is_context_window_exceeded(exception: Exception) -> bool:
+    if isinstance(exception, (ContextWindowExceededError, LLMContextWindowExceedError)):
+        return True
+
+    if not isinstance(exception, (BadRequestError, OpenAIError)):
+        return False
+
+    s = str(exception).lower()
+    return any(p in s for p in LONG_PROMPT_PATTERNS)
+
+
+AUTH_PATTERNS: list[str] = [
+    "invalid api key",
+    "unauthorized",
+    "missing api key",
+    "invalid authentication",
+    "access denied",
+]
+
+
+def looks_like_auth_error(exception: Exception) -> bool:
+    if not isinstance(exception, (BadRequestError, OpenAIError)):
+        return False
+    s = str(exception).lower()
+    if any(p in s for p in AUTH_PATTERNS):
+        return True
+    # Some providers include explicit status codes in message text
+    for code in ("status 401", "status 403"):
+        if code in s:
+            return True
+    return False
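
The classifier checks type first and message text second: SDK and litellm context-window types match via isinstance, while the substring patterns are consulted only for litellm BadRequestError/OpenAIError instances. A quick sanity-check sketch using only the SDK's own exception type, so no provider error has to be constructed:

from openhands.sdk.llm.exceptions import (
    LLMContextWindowExceedError,
    is_context_window_exceeded,
    looks_like_auth_error,
)

# The SDK-typed context-window error matches directly via isinstance.
exc = LLMContextWindowExceedError("prompt is too long")
print(is_context_window_exceeded(exc))  # True

# Plain exceptions never match: the substring patterns are only checked for
# litellm BadRequestError/OpenAIError instances.
print(is_context_window_exceeded(Exception("context length exceeded")))  # False
print(looks_like_auth_error(Exception("invalid api key")))               # False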

Lines changed: 54 additions & 0 deletions

@@ -0,0 +1,54 @@
+from __future__ import annotations
+
+from litellm.exceptions import (
+    APIConnectionError,
+    BadRequestError,
+    InternalServerError,
+    RateLimitError,
+    ServiceUnavailableError,
+    Timeout as LiteLLMTimeout,
+)
+
+from .classifier import is_context_window_exceeded, looks_like_auth_error
+from .types import (
+    LLMAuthenticationError,
+    LLMBadRequestError,
+    LLMContextWindowExceedError,
+    LLMRateLimitError,
+    LLMServiceUnavailableError,
+    LLMTimeoutError,
+)
+
+
+def map_provider_exception(exception: Exception) -> Exception:
+    """
+    Map provider/LiteLLM exceptions to SDK-typed exceptions.
+
+    Returns original exception if no mapping applies.
+    """
+    # Context window exceeded first (highest priority)
+    if is_context_window_exceeded(exception):
+        return LLMContextWindowExceedError(str(exception))
+
+    # Auth-like errors often appear as BadRequest/OpenAIError with specific text
+    if looks_like_auth_error(exception):
+        return LLMAuthenticationError(str(exception))
+
+    if isinstance(exception, RateLimitError):
+        return LLMRateLimitError(str(exception))
+
+    if isinstance(exception, LiteLLMTimeout):
+        return LLMTimeoutError(str(exception))
+
+    # Connectivity and service-side availability issues → service unavailable
+    if isinstance(
+        exception, (APIConnectionError, ServiceUnavailableError, InternalServerError)
+    ):
+        return LLMServiceUnavailableError(str(exception))
+
+    # Generic client-side 4xx errors
+    if isinstance(exception, BadRequestError):
+        return LLMBadRequestError(str(exception))
+
+    # Unknown: let caller re-raise original
+    return exception
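
map_provider_exception returns a mapped exception rather than raising it, and hands back the original object when nothing applies, so callers decide what to raise. A hedged sketch of the wrap-and-re-raise pattern a caller might use — the wrapper name and the zero-argument callable are assumptions, not SDK API:

from openhands.sdk.llm.exceptions import map_provider_exception


def call_with_normalized_errors(do_completion):
    """do_completion: any zero-argument callable that performs the provider call."""
    try:
        return do_completion()
    except Exception as e:
        mapped = map_provider_exception(e)
        if mapped is e:
            # Unknown error type: nothing was mapped, propagate it unchanged.
            raise
        # Raise the SDK-typed error, keeping the provider exception as the cause.
        raise mapped from e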
