Litellm dev 02 07 2025 p3 (#8387)
* add back streaming for base o3 (#8361)

* test(base_llm_unit_tests.py): add base test for o-series models - ensure streaming always works

* fix(base_llm_unit_tests.py): fix test for o series models

* refactor: move test

---------

Co-authored-by: Matteo Boschini <[email protected]>
krrishdholakia and mbosc authored Feb 8, 2025
1 parent b242c66 commit c83498f
Showing 2 changed files with 24 additions and 1 deletion.
2 changes: 1 addition & 1 deletion litellm/llms/openai/chat/o_series_transformation.py
@@ -54,7 +54,7 @@ def should_fake_stream(

         if model is None:
             return True
-        supported_stream_models = ["o1-mini", "o1-preview"]
+        supported_stream_models = ["o1-mini", "o1-preview", "o3-mini"]
         for supported_model in supported_stream_models:
             if supported_model in model:
                 return False
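For readers skimming the diff, here is a minimal, self-contained sketch of the decision this hunk changes. The function name and signature below are illustrative only, and the final fallback to fake streaming (return True) is not visible in the hunk and is assumed from context.

# Hedged sketch of the check shown above (not the actual litellm method).
# Assumption: when no supported model name matches, the method falls through
# to returning True, i.e. the stream is faked.
from typing import Optional

def should_fake_stream_sketch(model: Optional[str]) -> bool:
    if model is None:
        return True  # no model name to inspect -> fake the stream
    supported_stream_models = ["o1-mini", "o1-preview", "o3-mini"]
    for supported_model in supported_stream_models:
        if supported_model in model:
            return False  # model streams natively, no faking needed
    return True  # assumed fallback for other o-series models

# Example: should_fake_stream_sketch("o3-mini") -> False after this commit
# (it was True before, since "o3-mini" was not in the supported list).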
23 changes: 23 additions & 0 deletions tests/llm_translation/test_openai_o1.py
@@ -204,3 +204,26 @@ def test_o3_reasoning_effort():
         reasoning_effort="high",
     )
     assert resp.choices[0].message.content is not None
+
+
+def test_streaming_response():
+    """Test that streaming response is returned correctly"""
+    from litellm import completion
+
+    response = completion(
+        model="o3-mini",
+        messages=[
+            {"role": "system", "content": "Be a good bot!"},
+            {"role": "user", "content": "Hello!"},
+        ],
+        stream=True,
+    )
+
+    assert response is not None
+
+    chunks = []
+    for chunk in response:
+        chunks.append(chunk)
+
+    resp = litellm.stream_chunk_builder(chunks=chunks)
+    print(resp)
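The new test only prints the rebuilt response. If a stronger check were wanted, the object returned by stream_chunk_builder could be asserted on the same way test_o3_reasoning_effort does above; a hypothetical follow-up, not part of the commit:

# Hypothetical extra assertions (not in the commit); they mirror the check
# used in test_o3_reasoning_effort earlier in this file.
assert resp is not None
assert resp.choices[0].message.content is not None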
