Skip to content

Commit e722c25

Browse files
authored
Fix raising validation errors when streaming (#24)
1 parent 717b900 commit e722c25

File tree

2 files changed: +2 additions, −2 deletions

any_llm_client/clients/openai.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -263,7 +263,7 @@ async def _iter_response_chunks(self, response: httpx.Response) -> typing.AsyncI
                 validated_response = ChatCompletionsStreamingEvent.model_validate_json(event.data)
             except pydantic.ValidationError as validation_error:
                 raise LLMResponseValidationError(
-                    response_content=response.content, original_error=validation_error
+                    response_content=event.data.encode(), original_error=validation_error
                 ) from validation_error

             if not (

any_llm_client/clients/yandexgpt.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -190,7 +190,7 @@ async def _iter_response_chunks(self, response: httpx.Response) -> typing.AsyncI
                 validated_response = YandexGPTResponse.model_validate_json(one_line)
             except pydantic.ValidationError as validation_error:
                 raise LLMResponseValidationError(
-                    response_content=response.content, original_error=validation_error
+                    response_content=one_line.encode(), original_error=validation_error
                 ) from validation_error

         response_text = validated_response.result.alternatives[0].message.text

0 commit comments

Comments
 (0)