From a0eb853f0ce6900ecee884b87f9352e9e9186b0c Mon Sep 17 00:00:00 2001 From: Evgeny Lychkovsky Date: Mon, 12 May 2025 00:49:55 +0200 Subject: [PATCH] fix openai chat completions invocation: max_tokens -> max_completion_tokens --- src/agents/models/openai_chatcompletions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py index 89619f838..0d5902aa7 100644 --- a/src/agents/models/openai_chatcompletions.py +++ b/src/agents/models/openai_chatcompletions.py @@ -244,7 +244,7 @@ async def _fetch_response( top_p=self._non_null_or_not_given(model_settings.top_p), frequency_penalty=self._non_null_or_not_given(model_settings.frequency_penalty), presence_penalty=self._non_null_or_not_given(model_settings.presence_penalty), - max_tokens=self._non_null_or_not_given(model_settings.max_tokens), + max_completion_tokens=self._non_null_or_not_given(model_settings.max_tokens), tool_choice=tool_choice, response_format=response_format, parallel_tool_calls=parallel_tool_calls,