Skip to content

Commit

Permalink
fix: Handle the streaming of JSON delimited by newlines
Browse the repository at this point in the history
FUTURE_COPYBARA_INTEGRATE_REVIEW=#4861 from googleapis:release-please--branches--main 039f2cb
PiperOrigin-RevId: 719423860
  • Loading branch information
yeesian authored and copybara-github committed Jan 24, 2025
1 parent 713ffac commit c98780a
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 12 deletions.
6 changes: 3 additions & 3 deletions vertexai/reasoning_engines/_reasoning_engines.py
Original file line number Diff line number Diff line change
Expand Up @@ -840,9 +840,9 @@ def _method(self, **kwargs) -> Iterable[Any]:
),
)
for chunk in response:
parsed_json = _utils.to_parsed_json(chunk)
if parsed_json is not None:
yield parsed_json
for parsed_json in _utils.to_parsed_json(chunk):
if parsed_json is not None:
yield parsed_json

_method.__name__ = method_name
_method.__doc__ = doc
Expand Down
26 changes: 17 additions & 9 deletions vertexai/reasoning_engines/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,22 +104,30 @@ def to_parsed_json(body: httpbody_pb2.HttpBody) -> Any:
data = getattr(body, "data", None)

if content_type is None or data is None or "application/json" not in content_type:
return body
yield body
return

try:
utf8_data = data.decode("utf-8")
except Exception as e:
_LOGGER.warning(f"Failed to decode data: {data}. Exception: {e}")
return body
yield body
return

if not utf8_data:
return None

try:
return json.loads(utf8_data)
except Exception as e:
_LOGGER.warning(f"Failed to parse JSON: {utf8_data}. Exception: {e}")
return body # Return the raw body on error
yield None
return

# Handle the case of multiple dictionaries delimited by newlines.
for line in utf8_data.split("\n"):
if line:
try:
line = json.loads(line)
except Exception as e:
_LOGGER.warning(
f"failed to parse json: {line}. Exception: {e}"
)
yield line


def generate_schema(
Expand Down

0 comments on commit c98780a

Please sign in to comment.