From 5b47b2c542b9b4fb143af121022e2d5ad0890ef4 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Mon, 20 Nov 2023 17:08:37 -0500
Subject: [PATCH 1/3] chore(examples): fix static types in assistants example
 (#852)

---
 examples/assistant.py | 30 ++++++++++++------------------
 1 file changed, 12 insertions(+), 18 deletions(-)

diff --git a/examples/assistant.py b/examples/assistant.py
index ad3c1376de..c5fbb82a3a 100644
--- a/examples/assistant.py
+++ b/examples/assistant.py
@@ -1,6 +1,7 @@
-import openai
 import time
 
+import openai
+
 # gets API Key from environment variable OPENAI_API_KEY
 client = openai.OpenAI()
 
@@ -16,38 +17,31 @@
 message = client.beta.threads.messages.create(
     thread_id=thread.id,
     role="user",
-    content="I need to solve the equation `3x + 11 = 14`. Can you help me?"
+    content="I need to solve the equation `3x + 11 = 14`. Can you help me?",
 )
 
 run = client.beta.threads.runs.create(
-  thread_id=thread.id,
-  assistant_id=assistant.id,
-  instructions="Please address the user as Jane Doe. The user has a premium account."
+    thread_id=thread.id,
+    assistant_id=assistant.id,
+    instructions="Please address the user as Jane Doe. The user has a premium account.",
 )
 
 print("checking assistant status. ")
 while True:
-    run = client.beta.threads.runs.retrieve(
-        thread_id=thread.id,
-        run_id=run.id
-    )
+    run = client.beta.threads.runs.retrieve(thread_id=thread.id, run_id=run.id)
 
     if run.status == "completed":
         print("done!")
-        messages = client.beta.threads.messages.list(
-            thread_id=thread.id
-        )
+        messages = client.beta.threads.messages.list(thread_id=thread.id)
 
         print("messages: ")
         for message in messages:
-            print({
-                "role": message.role,
-                "message": message.content[0].text.value
-            })
+            assert message.content[0].type == "text"
+            print({"role": message.role, "message": message.content[0].text.value})
 
         client.beta.assistants.delete(assistant.id)
-        
+
         break
     else:
         print("in progress...")
-        time.sleep(5)
\ No newline at end of file
+        time.sleep(5)
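
A note on the static-type fix in this patch: the assistants API returns message
content as a union of text and image blocks, so reaching straight for
`message.content[0].text` fails static type checking. The added
`assert message.content[0].type == "text"` narrows that union for the checker.
A minimal, self-contained sketch of the same narrowing pattern (the block
classes below are illustrative stand-ins, not the SDK's actual types):

from __future__ import annotations

from dataclasses import dataclass
from typing import Literal, Union


@dataclass
class TextBlock:
    type: Literal["text"]
    text: str


@dataclass
class ImageBlock:
    type: Literal["image_file"]
    file_id: str


ContentBlock = Union[TextBlock, ImageBlock]


def first_text(blocks: list[ContentBlock]) -> str:
    block = blocks[0]
    # Without this assert, a type checker rejects `.text` because the
    # element could still be an ImageBlock at this point.
    assert block.type == "text"
    return block.text


print(first_text([TextBlock(type="text", text="3x + 11 = 14")]))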

From ba5046611029a67714d5120b9cc6a3c7fecce10c Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Mon, 20 Nov 2023 19:24:05 -0500
Subject: [PATCH 2/3] fix(client): attempt to parse unknown json content types
 (#854)

---
 src/openai/_base_client.py | 20 ++++++++++++------
 src/openai/_models.py      | 13 ++++++++++++
 src/openai/_response.py    | 31 +++++++++++++++++++---------
 tests/test_client.py       | 42 ++++++++++++++++++++++++++++++++++++++
 4 files changed, 90 insertions(+), 16 deletions(-)

diff --git a/src/openai/_base_client.py b/src/openai/_base_client.py
index 3db8b6fa35..a168301f75 100644
--- a/src/openai/_base_client.py
+++ b/src/openai/_base_client.py
@@ -74,7 +74,12 @@
     RAW_RESPONSE_HEADER,
 )
 from ._streaming import Stream, AsyncStream
-from ._exceptions import APIStatusError, APITimeoutError, APIConnectionError
+from ._exceptions import (
+    APIStatusError,
+    APITimeoutError,
+    APIConnectionError,
+    APIResponseValidationError,
+)
 
 log: logging.Logger = logging.getLogger(__name__)
 
@@ -518,13 +523,16 @@ def _process_response_data(
         if cast_to is UnknownResponse:
             return cast(ResponseT, data)
 
-        if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol):
-            return cast(ResponseT, cast_to.build(response=response, data=data))
+        try:
+            if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol):
+                return cast(ResponseT, cast_to.build(response=response, data=data))
 
-        if self._strict_response_validation:
-            return cast(ResponseT, validate_type(type_=cast_to, value=data))
+            if self._strict_response_validation:
+                return cast(ResponseT, validate_type(type_=cast_to, value=data))
 
-        return cast(ResponseT, construct_type(type_=cast_to, value=data))
+            return cast(ResponseT, construct_type(type_=cast_to, value=data))
+        except pydantic.ValidationError as err:
+            raise APIResponseValidationError(response=response, body=data) from err
 
     @property
     def qs(self) -> Querystring:
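
The hunk above centralizes error conversion: any `pydantic.ValidationError`
raised while building the response model (strict validation, loose
construction, or the ModelBuilderProtocol path) is re-raised as
APIResponseValidationError with the raw body attached and the original error
chained. A rough standalone sketch of that wrapping pattern, assuming pydantic
v2 (`Payload`, `ResponseValidationError`, and `parse_payload` are illustrative
names, not the SDK's):

from __future__ import annotations

import pydantic


class Payload(pydantic.BaseModel):
    foo: int


class ResponseValidationError(Exception):
    """Illustrative stand-in for the client's APIResponseValidationError."""

    def __init__(self, body: object) -> None:
        super().__init__(f"response body failed validation: {body!r}")
        self.body = body


def parse_payload(data: object) -> Payload:
    try:
        # The real client gates this on `_strict_response_validation` and
        # otherwise falls back to loose construction; the wrapping is the same.
        return Payload.model_validate(data)
    except pydantic.ValidationError as err:
        # Chain the original error so the validation details stay visible.
        raise ResponseValidationError(body=data) from err


print(parse_payload({"foo": 1}))    # Payload(foo=1)
# parse_payload({"foo": "bar"})     # would raise ResponseValidationError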
diff --git a/src/openai/_models.py b/src/openai/_models.py
index 6d5aad5963..5b8c96010f 100644
--- a/src/openai/_models.py
+++ b/src/openai/_models.py
@@ -263,6 +263,19 @@ def _construct_field(value: object, field: FieldInfo, key: str) -> object:
     return construct_type(value=value, type_=type_)
 
 
+def is_basemodel(type_: type) -> bool:
+    """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`"""
+    origin = get_origin(type_) or type_
+    if is_union(type_):
+        for variant in get_args(type_):
+            if is_basemodel(variant):
+                return True
+
+        return False
+
+    return issubclass(origin, BaseModel) or issubclass(origin, GenericModel)
+
+
 def construct_type(*, value: object, type_: type) -> object:
     """Loose coercion to the expected type with construction of nested values.
 
diff --git a/src/openai/_response.py b/src/openai/_response.py
index 3cc8fd8cc1..933c37525e 100644
--- a/src/openai/_response.py
+++ b/src/openai/_response.py
@@ -1,17 +1,17 @@
 from __future__ import annotations
 
 import inspect
+import logging
 import datetime
 import functools
 from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast
 from typing_extensions import Awaitable, ParamSpec, get_args, override, get_origin
 
 import httpx
-import pydantic
 
 from ._types import NoneType, UnknownResponse, BinaryResponseContent
 from ._utils import is_given
-from ._models import BaseModel
+from ._models import BaseModel, is_basemodel
 from ._constants import RAW_RESPONSE_HEADER
 from ._exceptions import APIResponseValidationError
 
@@ -23,6 +23,8 @@
 P = ParamSpec("P")
 R = TypeVar("R")
 
+log: logging.Logger = logging.getLogger(__name__)
+
 
 class APIResponse(Generic[R]):
     _cast_to: type[R]
@@ -174,6 +176,18 @@ def _parse(self) -> R:
         # in the response, e.g. application/json; charset=utf-8
         content_type, *_ = response.headers.get("content-type").split(";")
         if content_type != "application/json":
+            if is_basemodel(cast_to):
+                try:
+                    data = response.json()
+                except Exception as exc:
+                    log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc)
+                else:
+                    return self._client._process_response_data(
+                        data=data,
+                        cast_to=cast_to,  # type: ignore
+                        response=response,
+                    )
+
             if self._client._strict_response_validation:
                 raise APIResponseValidationError(
                     response=response,
@@ -188,14 +202,11 @@ def _parse(self) -> R:
 
         data = response.json()
 
-        try:
-            return self._client._process_response_data(
-                data=data,
-                cast_to=cast_to,  # type: ignore
-                response=response,
-            )
-        except pydantic.ValidationError as err:
-            raise APIResponseValidationError(response=response, body=data) from err
+        return self._client._process_response_data(
+            data=data,
+            cast_to=cast_to,  # type: ignore
+            response=response,
+        )
 
     @override
     def __repr__(self) -> str:
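
The `_parse` change above is the behavioral core of this patch: when a response
declares a non-JSON content type but the caller expects a pydantic model, the
client now tries `response.json()` anyway, and only falls back to the previous
behavior (raise under strict validation, otherwise return the raw text) if
decoding fails. A simplified sketch of that decision flow (the function and
parameter names below are illustrative, not the client's internals):

from __future__ import annotations

import json
from typing import Any


def parse_body(content_type: str, text: str, expects_model: bool, strict: bool) -> Any:
    """Simplified ordering of the fallbacks used by the new _parse logic."""
    media_type, *_ = content_type.split(";")
    if media_type != "application/json":
        if expects_model:
            try:
                return json.loads(text)  # mislabeled JSON: recover it anyway
            except Exception:
                pass  # not JSON after all; fall through to the old behavior
        if strict:
            raise ValueError(f"Expected application/json, got {content_type!r}")
        return text  # lenient mode: hand back the raw body
    return json.loads(text)


print(parse_body("application/text", '{"foo": 2}', expects_model=True, strict=True))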
diff --git a/tests/test_client.py b/tests/test_client.py
index e295d193e8..c5dbfe4bfe 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -401,6 +401,27 @@ class Model2(BaseModel):
         assert isinstance(response, Model1)
         assert response.foo == 1
 
+    @pytest.mark.respx(base_url=base_url)
+    def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None:
+        """
+        Response that sets Content-Type to something other than application/json but returns json data
+        """
+
+        class Model(BaseModel):
+            foo: int
+
+        respx_mock.get("/foo").mock(
+            return_value=httpx.Response(
+                200,
+                content=json.dumps({"foo": 2}),
+                headers={"Content-Type": "application/text"},
+            )
+        )
+
+        response = self.client.get("/foo", cast_to=Model)
+        assert isinstance(response, Model)
+        assert response.foo == 2
+
     def test_base_url_env(self) -> None:
         with update_env(OPENAI_BASE_URL="http://localhost:5000/from/env"):
             client = OpenAI(api_key=api_key, _strict_response_validation=True)
@@ -939,6 +960,27 @@ class Model2(BaseModel):
         assert isinstance(response, Model1)
         assert response.foo == 1
 
+    @pytest.mark.respx(base_url=base_url)
+    async def test_non_application_json_content_type_for_json_data(self, respx_mock: MockRouter) -> None:
+        """
+        Response that sets Content-Type to something other than application/json but returns json data
+        """
+
+        class Model(BaseModel):
+            foo: int
+
+        respx_mock.get("/foo").mock(
+            return_value=httpx.Response(
+                200,
+                content=json.dumps({"foo": 2}),
+                headers={"Content-Type": "application/text"},
+            )
+        )
+
+        response = await self.client.get("/foo", cast_to=Model)
+        assert isinstance(response, Model)
+        assert response.foo == 2
+
     def test_base_url_env(self) -> None:
         with update_env(OPENAI_BASE_URL="http://localhost:5000/from/env"):
             client = AsyncOpenAI(api_key=api_key, _strict_response_validation=True)

From d5a2547bfb43b6bf5c561d178e82d5c5b5c33376 Mon Sep 17 00:00:00 2001
From: Stainless Bot <107565488+stainless-bot@users.noreply.github.com>
Date: Mon, 20 Nov 2023 19:24:46 -0500
Subject: [PATCH 3/3] release: 1.3.4

---
 .release-please-manifest.json |  2 +-
 CHANGELOG.md                  | 13 +++++++++++++
 pyproject.toml                |  2 +-
 src/openai/_version.py        |  2 +-
 4 files changed, 16 insertions(+), 3 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 8022176dd3..c050b0fe03 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.3.3"
+  ".": "1.3.4"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 970724b4b8..1caef71db9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
 # Changelog
 
+## 1.3.4 (2023-11-21)
+
+Full Changelog: [v1.3.3...v1.3.4](https://github.com/openai/openai-python/compare/v1.3.3...v1.3.4)
+
+### Bug Fixes
+
+* **client:** attempt to parse unknown json content types ([#854](https://github.com/openai/openai-python/issues/854)) ([ba50466](https://github.com/openai/openai-python/commit/ba5046611029a67714d5120b9cc6a3c7fecce10c))
+
+
+### Chores
+
+* **examples:** fix static types in assistants example ([#852](https://github.com/openai/openai-python/issues/852)) ([5b47b2c](https://github.com/openai/openai-python/commit/5b47b2c542b9b4fb143af121022e2d5ad0890ef4))
+
 ## 1.3.3 (2023-11-17)
 
 Full Changelog: [v1.3.2...v1.3.3](https://github.com/openai/openai-python/compare/v1.3.2...v1.3.3)
diff --git a/pyproject.toml b/pyproject.toml
index 8c9c6022f2..ae6fbaeeca 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai"
-version = "1.3.3"
+version = "1.3.4"
 description = "The official Python library for the openai API"
 readme = "README.md"
 license = "Apache-2.0"
diff --git a/src/openai/_version.py b/src/openai/_version.py
index b04859b6bb..ddfc847864 100644
--- a/src/openai/_version.py
+++ b/src/openai/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless.
 
 __title__ = "openai"
-__version__ = "1.3.3"  # x-release-please-version
+__version__ = "1.3.4"  # x-release-please-version