2 changes: 2 additions & 0 deletions src/agents/run.py
@@ -640,6 +640,7 @@ async def run(
 model_responses.append(turn_result.model_response)
 original_input = turn_result.original_input
 generated_items = turn_result.generated_items
+context_wrapper.run_items = generated_items

 if server_conversation_tracker is not None:
     server_conversation_tracker.track_server_items(turn_result.model_response)
@@ -1080,6 +1081,7 @@ async def _start_streaming(
 ]
 streamed_result.input = turn_result.original_input
 streamed_result.new_items = turn_result.generated_items
+context_wrapper.run_items = turn_result.generated_items

 if server_conversation_tracker is not None:
     server_conversation_tracker.track_server_items(turn_result.model_response)
14 changes: 11 additions & 3 deletions src/agents/run_context.py
@@ -1,16 +1,21 @@
 from dataclasses import dataclass, field
-from typing import Any, Generic
+from typing import Any, Generic, TYPE_CHECKING

-from typing_extensions import TypeVar

+from typing_extensions import TypeVar
 from .usage import Usage

+if TYPE_CHECKING:
+    from .items import RunItem
+
 TContext = TypeVar("TContext", default=Any)


+
 @dataclass
 class RunContextWrapper(Generic[TContext]):
-    """This wraps the context object that you passed to `Runner.run()`. It also contains
+    """
+    This wraps the context object that you passed to `Runner.run()`. It also contains
     information about the usage of the agent run so far.

     NOTE: Contexts are not passed to the LLM. They're a way to pass dependencies and data to code
@@ -24,3 +29,6 @@ class RunContextWrapper(Generic[TContext]):
     """The usage of the agent run so far. For streamed responses, the usage will be stale until the
     last chunk of the stream is processed.
     """
+
+    run_items: list["RunItem"] = field(default_factory=list)
+    """The items generated by the agent so far."""