From 1f0ba4770b8f67d978afb0ff802b86af5e6cfb43 Mon Sep 17 00:00:00 2001
From: Rohan Mehta
Date: Tue, 15 Apr 2025 12:43:28 -0400
Subject: [PATCH] Only include stream_options when streaming

---
 src/agents/models/openai_chatcompletions.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py
index 712a7998..a4f93cb7 100644
--- a/src/agents/models/openai_chatcompletions.py
+++ b/src/agents/models/openai_chatcompletions.py
@@ -528,7 +528,9 @@ async def _fetch_response(
         reasoning_effort = model_settings.reasoning.effort if model_settings.reasoning else None
         store = _Converter.get_store_param(self._get_client(), model_settings)
 
-        stream_options = _Converter.get_stream_options_param(self._get_client(), model_settings)
+        stream_options = _Converter.get_stream_options_param(
+            self._get_client(), model_settings, stream=stream
+        )
 
         ret = await self._get_client().chat.completions.create(
             model=self.model,
@@ -591,8 +593,11 @@ def get_store_param(cls, client: AsyncOpenAI, model_settings: ModelSettings) ->
 
     @classmethod
     def get_stream_options_param(
-        cls, client: AsyncOpenAI, model_settings: ModelSettings
+        cls, client: AsyncOpenAI, model_settings: ModelSettings, stream: bool
     ) -> dict[str, bool] | None:
+        if not stream:
+            return None
+
         default_include_usage = True if cls.is_openai(client) else None
         include_usage = (
             model_settings.include_usage
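
Note (illustration, not part of the patch): a minimal standalone sketch of the behavior this change introduces. The client check and ModelSettings lookup are replaced by plain parameters, and the usage-resolution step beyond the visible hunk is assumed to mirror the default handling shown above.

def get_stream_options_param_sketch(
    is_openai_client: bool,        # stands in for cls.is_openai(client)
    include_usage: bool | None,    # stands in for model_settings.include_usage
    stream: bool,
) -> dict[str, bool] | None:
    # Non-streaming requests should not carry stream_options at all.
    if not stream:
        return None
    # For OpenAI clients, usage reporting defaults to on; otherwise it is left unset.
    default_include_usage = True if is_openai_client else None
    resolved = include_usage if include_usage is not None else default_include_usage
    return {"include_usage": resolved} if resolved is not None else None

# Expected behavior after the patch:
assert get_stream_options_param_sketch(True, None, stream=False) is None
assert get_stream_options_param_sketch(True, None, stream=True) == {"include_usage": True}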