Skip to content

Commit 717b900

Browse files
authored
Reraise LLMResponseValidationError on pydantic validation errors (#23)
1 parent eca9dc9 commit 717b900

File tree

8 files changed

+56
-15
lines changed

8 files changed

+56
-15
lines changed

README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -182,6 +182,9 @@ async with any_llm_client.OpenAIClient(config, ...) as client:
182182

183183
- `any_llm_client.LLMError` or `any_llm_client.OutOfTokensOrSymbolsError` when the LLM API responds with a failed HTTP status,
184184
- `any_llm_client.LLMRequestValidationError` when images are passed to YandexGPT client.
185+
- `any_llm_client.LLMResponseValidationError` when an invalid response comes from the LLM API (reraised from `pydantic.ValidationError`).
186+
187+
All these exceptions inherit from the base class `any_llm_client.AnyLLMClientError`.
185188

186189
#### Timeouts, proxy & other HTTP settings
187190

any_llm_client/__init__.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
from any_llm_client.clients.yandexgpt import YandexGPTClient, YandexGPTConfig
44
from any_llm_client.core import (
55
AnyContentItem,
6+
AnyLLMClientError,
67
AssistantMessage,
78
ContentItemList,
89
ImageContentItem,
@@ -11,6 +12,7 @@
1112
LLMError,
1213
LLMRequestValidationError,
1314
LLMResponse,
15+
LLMResponseValidationError,
1416
Message,
1517
MessageRole,
1618
OutOfTokensOrSymbolsError,
@@ -24,6 +26,7 @@
2426

2527
__all__ = [
2628
"AnyContentItem",
29+
"AnyLLMClientError",
2730
"AnyLLMConfig",
2831
"AssistantMessage",
2932
"ContentItemList",
@@ -33,6 +36,7 @@
3336
"LLMError",
3437
"LLMRequestValidationError",
3538
"LLMResponse",
39+
"LLMResponseValidationError",
3640
"Message",
3741
"MessageRole",
3842
"MockLLMClient",

any_llm_client/clients/openai.py

Lines changed: 20 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
LLMConfigValue,
1818
LLMError,
1919
LLMResponse,
20+
LLMResponseValidationError,
2021
Message,
2122
MessageRole,
2223
OutOfTokensOrSymbolsError,
@@ -236,27 +237,41 @@ async def request_llm_message(
236237
)
237238
except httpx.HTTPStatusError as exception:
238239
_handle_status_error(status_code=exception.response.status_code, content=exception.response.content)
240+
239241
try:
240242
validated_message_model: typing.Final = (
241243
ChatCompletionsNotStreamingResponse.model_validate_json(response.content).choices[0].message
242244
)
243-
return LLMResponse(
244-
content=validated_message_model.content,
245-
reasoning_content=validated_message_model.reasoning_content,
246-
)
245+
except pydantic.ValidationError as validation_error:
246+
raise LLMResponseValidationError(
247+
response_content=response.content, original_error=validation_error
248+
) from validation_error
247249
finally:
248250
await response.aclose()
249251

252+
return LLMResponse(
253+
content=validated_message_model.content,
254+
reasoning_content=validated_message_model.reasoning_content,
255+
)
256+
250257
async def _iter_response_chunks(self, response: httpx.Response) -> typing.AsyncIterable[LLMResponse]:
251258
async for event in httpx_sse.EventSource(response).aiter_sse():
252259
if event.data == "[DONE]":
253260
break
254-
validated_response = ChatCompletionsStreamingEvent.model_validate_json(event.data)
261+
262+
try:
263+
validated_response = ChatCompletionsStreamingEvent.model_validate_json(event.data)
264+
except pydantic.ValidationError as validation_error:
265+
raise LLMResponseValidationError(
266+
response_content=response.content, original_error=validation_error
267+
) from validation_error
268+
255269
if not (
256270
(validated_delta := validated_response.choices[0].delta)
257271
and (validated_delta.content or validated_delta.reasoning_content)
258272
):
259273
continue
274+
260275
yield LLMResponse(content=validated_delta.content, reasoning_content=validated_delta.reasoning_content)
261276

262277
@contextlib.asynccontextmanager

any_llm_client/clients/yandexgpt.py

Lines changed: 16 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
LLMError,
1919
LLMRequestValidationError,
2020
LLMResponse,
21+
LLMResponseValidationError,
2122
Message,
2223
MessageRole,
2324
OutOfTokensOrSymbolsError,
@@ -173,14 +174,25 @@ async def request_llm_message(
173174
except httpx.HTTPStatusError as exception:
174175
_handle_status_error(status_code=exception.response.status_code, content=exception.response.content)
175176

176-
return LLMResponse(
177-
content=YandexGPTResponse.model_validate_json(response.content).result.alternatives[0].message.text,
178-
)
177+
try:
178+
validated_response: typing.Final = YandexGPTResponse.model_validate_json(response.content)
179+
except pydantic.ValidationError as validation_error:
180+
raise LLMResponseValidationError(
181+
response_content=response.content, original_error=validation_error
182+
) from validation_error
183+
184+
return LLMResponse(content=validated_response.result.alternatives[0].message.text)
179185

180186
async def _iter_response_chunks(self, response: httpx.Response) -> typing.AsyncIterable[LLMResponse]:
181187
previous_cursor = 0
182188
async for one_line in response.aiter_lines():
183-
validated_response = YandexGPTResponse.model_validate_json(one_line)
189+
try:
190+
validated_response = YandexGPTResponse.model_validate_json(one_line)
191+
except pydantic.ValidationError as validation_error:
192+
raise LLMResponseValidationError(
193+
response_content=response.content, original_error=validation_error
194+
) from validation_error
195+
184196
response_text = validated_response.result.alternatives[0].message.text
185197
yield LLMResponse(content=response_text[previous_cursor:])
186198
previous_cursor = len(response_text)

any_llm_client/core.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -147,3 +147,9 @@ class OutOfTokensOrSymbolsError(LLMError): ...
147147
@dataclasses.dataclass
148148
class LLMRequestValidationError(AnyLLMClientError):
149149
message: str
150+
151+
152+
@dataclasses.dataclass
153+
class LLMResponseValidationError(AnyLLMClientError):
154+
response_content: bytes
155+
original_error: pydantic.ValidationError

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ dynamic = ["version"]
2727
dev = [
2828
"anyio",
2929
"faker",
30-
"polyfactory",
30+
"polyfactory==2.20.0",
3131
"pydantic-settings",
3232
"pytest-cov",
3333
"pytest",

tests/test_openai_client.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
import faker
44
import httpx
5-
import pydantic
65
import pytest
76
from polyfactory.factories.pydantic_factory import ModelFactory
87

@@ -17,6 +16,7 @@
1716
OneStreamingChoice,
1817
OneStreamingChoiceDelta,
1918
)
19+
from any_llm_client.core import LLMResponseValidationError
2020
from tests.conftest import LLMFuncRequest, LLMFuncRequestFactory, consume_llm_message_chunks
2121

2222

@@ -58,7 +58,7 @@ async def test_fails_without_alternatives(self) -> None:
5858
transport=httpx.MockTransport(lambda _: response),
5959
)
6060

61-
with pytest.raises(pydantic.ValidationError):
61+
with pytest.raises(LLMResponseValidationError):
6262
await client.request_llm_message(**LLMFuncRequestFactory.build())
6363

6464

@@ -118,7 +118,7 @@ async def test_fails_without_alternatives(self) -> None:
118118
transport=httpx.MockTransport(lambda _: response),
119119
)
120120

121-
with pytest.raises(pydantic.ValidationError):
121+
with pytest.raises(LLMResponseValidationError):
122122
await consume_llm_message_chunks(client.stream_llm_message_chunks(**LLMFuncRequestFactory.build()))
123123

124124

tests/test_yandexgpt_client.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99

1010
import any_llm_client
1111
from any_llm_client.clients.yandexgpt import YandexGPTAlternative, YandexGPTMessage, YandexGPTResponse, YandexGPTResult
12+
from any_llm_client.core import LLMResponseValidationError
1213
from tests.conftest import LLMFuncRequest, LLMFuncRequestFactory, consume_llm_message_chunks
1314

1415

@@ -95,7 +96,7 @@ async def test_fails_without_alternatives(self) -> None:
9596
transport=httpx.MockTransport(lambda _: response),
9697
)
9798

98-
with pytest.raises(pydantic.ValidationError):
99+
with pytest.raises(LLMResponseValidationError):
99100
await client.request_llm_message(**LLMFuncRequestWithTextContentMessagesFactory.build())
100101

101102

@@ -153,7 +154,7 @@ async def test_fails_without_alternatives(self) -> None:
153154
transport=httpx.MockTransport(lambda _: response),
154155
)
155156

156-
with pytest.raises(pydantic.ValidationError):
157+
with pytest.raises(LLMResponseValidationError):
157158
await consume_llm_message_chunks(
158159
client.stream_llm_message_chunks(**LLMFuncRequestWithTextContentMessagesFactory.build()),
159160
)

0 commit comments

Comments
 (0)