From 311521e56ef83fda5806b3447ebb1f9c2aeac2d1 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 30 Jul 2024 15:01:26 -0700
Subject: [PATCH] fix(ollama.py): correctly raise ollama streaming error

Fixes https://github.com/BerriAI/litellm/issues/4974
---
 litellm/llms/ollama.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/litellm/llms/ollama.py b/litellm/llms/ollama.py
index 7b15582f4893..6b984e1d826e 100644
--- a/litellm/llms/ollama.py
+++ b/litellm/llms/ollama.py
@@ -258,7 +258,7 @@ def get_ollama_response(
             logging_obj=logging_obj,
         )
         return response
-    elif stream == True:
+    elif stream is True:
         return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj)
 
     response = requests.post(
@@ -326,7 +326,7 @@ def ollama_completion_stream(url, data, logging_obj):
         try:
             if response.status_code != 200:
                 raise OllamaError(
-                    status_code=response.status_code, message=response.text
+                    status_code=response.status_code, message=response.read()
                 )
 
             streamwrapper = litellm.CustomStreamWrapper(
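
Note (not part of the patch): a minimal sketch of why response.read() is needed
here, assuming httpx's streaming API as used in ollama_completion_stream. On a
response opened with httpx.stream(), the body is not loaded eagerly, so
accessing response.text before the body is consumed raises
httpx.ResponseNotRead; response.read() consumes the remaining body and returns
its bytes, giving OllamaError a usable error message. The URL and payload below
are illustrative only.

    # sketch: streamed httpx response must be read before its body is usable
    import httpx

    url = "http://localhost:11434/api/generate"  # assumed local Ollama endpoint
    data = {"model": "does-not-exist", "prompt": "hi"}  # hypothetical payload

    with httpx.stream("POST", url, json=data) as response:
        # status and headers are available as soon as the stream opens
        if response.status_code != 200:
            # response.text here would raise httpx.ResponseNotRead,
            # because the streamed body has not been consumed yet
            body = response.read()  # consume the stream; returns bytes
            print(response.status_code, body)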