Commit af9aba4

Commit message: fix
1 parent 741da67 commit af9aba4

2 files changed: +27 -3 lines changed

src/agents/extensions/models/litellm_model.py

Lines changed: 15 additions & 2 deletions
@@ -3,7 +3,7 @@
 import json
 import time
 from collections.abc import AsyncIterator
-from typing import Any, Literal, cast, overload
+from typing import Any, Literal, Optional, cast, overload
 
 from openai.types.responses.response_usage import InputTokensDetails, OutputTokensDetails
 
@@ -45,6 +45,14 @@
 from ...usage import Usage
 
 
+class InternalChatCompletionMessage(ChatCompletionMessage):
+    """
+    An internal subclass to carry reasoning_content without modifying the original model.
+    """
+
+    reasoning_content: Optional[str] = None
+
+
 class LitellmModel(Model):
     """This class enables using any model via LiteLLM. LiteLLM allows you to acess OpenAPI,
     Anthropic, Gemini, Mistral, and many other models.
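The subclass approach works because ChatCompletionMessage is a pydantic model: a subclass can declare one extra optional field while its instances still pass isinstance checks against the base type, so existing downstream code is unaffected. A minimal standalone sketch (the example values are illustrative, not part of this commit):

```python
from typing import Optional

from openai.types.chat import ChatCompletionMessage


class InternalChatCompletionMessage(ChatCompletionMessage):
    """Carries reasoning_content without modifying the upstream model."""

    reasoning_content: Optional[str] = None


msg = InternalChatCompletionMessage(
    role="assistant",
    content="Hello!",
    reasoning_content="The user greeted me, so I greet them back.",
)

assert isinstance(msg, ChatCompletionMessage)  # existing type checks still pass
print(msg.reasoning_content)  # the extra field is available on the instance
```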
@@ -364,13 +372,18 @@ def convert_message_to_openai(
             provider_specific_fields.get("refusal", None) if provider_specific_fields else None
         )
 
-        return ChatCompletionMessage(
+        reasoning_content = ""
+        if hasattr(message, "reasoning_content") and message.reasoning_content:
+            reasoning_content = message.reasoning_content
+
+        return InternalChatCompletionMessage(
             content=message.content,
             refusal=refusal,
             role="assistant",
             annotations=cls.convert_annotations_to_openai(message),
             audio=message.get("audio", None),  # litellm deletes audio if not present
             tool_calls=tool_calls,
+            reasoning_content=reasoning_content,
         )
 
     @classmethod
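The hasattr guard matters because reasoning_content is provider-specific: messages from models that do not emit reasoning simply lack the attribute, so the conversion falls back to an empty string. A standalone sketch of that defaulting behavior (SimpleNamespace stands in for a LiteLLM message object here):

```python
from types import SimpleNamespace


def extract_reasoning(message) -> str:
    # Same guard as in convert_message_to_openai: copy the text only if the
    # attribute exists and is non-empty, otherwise fall back to "".
    if hasattr(message, "reasoning_content") and message.reasoning_content:
        return message.reasoning_content
    return ""


with_reasoning = SimpleNamespace(content="42", reasoning_content="Work through the sum step by step...")
plain = SimpleNamespace(content="42")  # e.g. a model that emits no reasoning text

print(extract_reasoning(with_reasoning))  # "Work through the sum step by step..."
print(extract_reasoning(plain))           # "" (attribute absent, guard handles it)
```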

src/agents/models/chatcmpl_converter.py

Lines changed: 12 additions & 1 deletion
@@ -36,6 +36,7 @@
     ResponseOutputRefusal,
     ResponseOutputText,
     ResponseReasoningItem,
+    ResponseReasoningItemParam,
 )
 from openai.types.responses.response_input_param import FunctionCallOutput, ItemReference, Message
 from openai.types.responses.response_reasoning_item import Summary
@@ -210,6 +211,12 @@ def maybe_response_output_message(cls, item: Any) -> ResponseOutputMessageParam
             return cast(ResponseOutputMessageParam, item)
         return None
 
+    @classmethod
+    def maybe_reasoning_message(cls, item: Any) -> ResponseReasoningItemParam | None:
+        if isinstance(item, dict) and item.get("type") == "reasoning":
+            return cast(ResponseReasoningItemParam, item)
+        return None
+
     @classmethod
     def extract_text_content(
         cls, content: str | Iterable[ResponseInputContentParam]
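maybe_reasoning_message follows the same pattern as the converter's other maybe_* helpers: it recognizes the dict form of a reasoning item by its "type" key and returns it (cast to ResponseReasoningItemParam), or None for anything else. A standalone sketch of the predicate, with a made-up item id:

```python
from typing import Any


def maybe_reasoning_message(item: Any) -> dict | None:
    # Reasoning items are plain dicts whose "type" field is "reasoning".
    if isinstance(item, dict) and item.get("type") == "reasoning":
        return item
    return None


reasoning_item = {"id": "rs_123", "type": "reasoning", "summary": []}  # hypothetical id
assistant_item = {"type": "message", "role": "assistant", "content": "hi"}

print(maybe_reasoning_message(reasoning_item) is not None)  # True
print(maybe_reasoning_message(assistant_item))              # None
```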
@@ -459,7 +466,11 @@ def ensure_assistant_message() -> ChatCompletionAssistantMessageParam:
                     f"Encountered an item_reference, which is not supported: {item_ref}"
                 )
 
-            # 7) If we haven't recognized it => fail or ignore
+            # 7) reasoning message => not handled
+            elif cls.maybe_reasoning_message(item):
+                pass
+
+            # 8) If we haven't recognized it => fail or ignore
             else:
                 raise UserError(f"Unhandled item type or structure: {item}")
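The net effect in items_to_messages is that reasoning items appearing in an input list are now recognized and skipped instead of hitting the UserError branch. A simplified standalone sketch of that dispatch (only two item types are modeled; the real converter handles several more and builds proper chat-completions messages):

```python
def items_to_messages(items: list[dict]) -> list[dict]:
    messages: list[dict] = []
    for item in items:
        if item.get("type") == "message":
            messages.append({"role": item["role"], "content": item["content"]})
        elif item.get("type") == "reasoning":
            # Recognized but intentionally not converted (the behavior this commit adds).
            pass
        else:
            raise ValueError(f"Unhandled item type or structure: {item}")
    return messages


history = [
    {"type": "message", "role": "assistant", "content": "The answer is 42."},
    {"type": "reasoning", "id": "rs_123", "summary": []},  # would hit the unhandled branch before this change
]
print(items_to_messages(history))  # [{'role': 'assistant', 'content': 'The answer is 42.'}]
```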
