Skip to content

Commit a1b4dbc

Browse files
committed
Fixes openai#71: Added support for max_tokens in ModelSettings
1 parent c8f3cdd commit a1b4dbc

File tree

2 files changed

+3
-0
lines changed

2 files changed

+3
-0
lines changed

Diff for: src/agents/model_settings.py

+2
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ class ModelSettings:
     tool_choice: Literal["auto", "required", "none"] | str | None = None
     parallel_tool_calls: bool | None = False
     truncation: Literal["auto", "disabled"] | None = None
+    max_tokens: int | None = None

     def resolve(self, override: ModelSettings | None) -> ModelSettings:
         """Produce a new ModelSettings by overlaying any non-None values from the
@@ -33,4 +34,5 @@ def resolve(self, override: ModelSettings | None) -> ModelSettings:
             tool_choice=override.tool_choice or self.tool_choice,
             parallel_tool_calls=override.parallel_tool_calls or self.parallel_tool_calls,
             truncation=override.truncation or self.truncation,
+            max_tokens=override.max_tokens or self.max_tokens,
         )

Diff for: src/agents/models/openai_chatcompletions.py

+1
Original file line numberDiff line numberDiff line change
@@ -509,6 +509,7 @@ async def _fetch_response(
             stream=stream,
             stream_options={"include_usage": True} if stream else NOT_GIVEN,
             extra_headers=_HEADERS,
+            max_tokens=self._non_null_or_not_given(model_settings.max_tokens),
         )

         if isinstance(ret, ChatCompletion):

0 commit comments

Comments (0)