From cbfbde45e6011ed590a02491381a616e81ca059d Mon Sep 17 00:00:00 2001
From: Luis Rueda
Date: Sat, 15 Jun 2024 12:32:48 -0500
Subject: [PATCH 1/6] Add default to None for optional fields

---
 libs/aws/langchain_aws/chat_models/bedrock_converse.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/libs/aws/langchain_aws/chat_models/bedrock_converse.py b/libs/aws/langchain_aws/chat_models/bedrock_converse.py
index c1b3748f..38f2319d 100644
--- a/libs/aws/langchain_aws/chat_models/bedrock_converse.py
+++ b/libs/aws/langchain_aws/chat_models/bedrock_converse.py
@@ -268,7 +268,7 @@ class Joke(BaseModel):
     max_tokens: Optional[int] = None
     """Max tokens to generate."""

-    stop_sequences: Optional[List[str]] = Field(None, alias="stop")
+    stop_sequences: Optional[List[str]] = Field(default=None, alias="stop")
     """Stop generation if any of these substrings occurs."""

     temperature: Optional[float] = None
@@ -308,7 +308,7 @@ class Joke(BaseModel):
     have an ARN associated with them.
     """

-    endpoint_url: Optional[str] = Field(None, alias="base_url")
+    endpoint_url: Optional[str] = Field(default=None, alias="base_url")
     """Needed if you don't want to default to us-east-1 endpoint"""

     config: Any = None

From 4998d0a86f23635fce87aaafd0c15a658e59ccb5 Mon Sep 17 00:00:00 2001
From: Luis Rueda
Date: Sat, 15 Jun 2024 15:52:54 -0500
Subject: [PATCH 2/6] Fixes some typos found during testing

---
 libs/aws/langchain_aws/chat_models/bedrock_converse.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/libs/aws/langchain_aws/chat_models/bedrock_converse.py b/libs/aws/langchain_aws/chat_models/bedrock_converse.py
index 38f2319d..aa378257 100644
--- a/libs/aws/langchain_aws/chat_models/bedrock_converse.py
+++ b/libs/aws/langchain_aws/chat_models/bedrock_converse.py
@@ -468,7 +468,6 @@ def _converse_params(
         if not toolConfig and tools:
             toolChoice = _format_tool_choice(toolChoice) if toolChoice else None
             toolConfig = {"tools": _format_tools(tools), "toolChoice": toolChoice}
-
         return _drop_none(
             {
                 "modelId": modelId or self.model_id,
@@ -667,7 +666,7 @@ def _anthropic_to_bedrock(
             bedrock_content.append(
                 {
                     "toolResult": {
-                        "toolUseId": block["toolUseId"],
+                        "toolUseId": block["tool_use_id"],
                         "content": _anthropic_to_bedrock(content),
                     }
                 }
             )
From 11bc0e036ed73c08368397e5935ae48c66a75f97 Mon Sep 17 00:00:00 2001
From: Luis Rueda
Date: Sat, 15 Jun 2024 16:24:13 -0500
Subject: [PATCH 3/6] Add function to convert to a dictionary

Anthropic returns the input dictionary for the tool as a string.
Adding a function to attempt to convert it to a dict if it is a string;
if conversion fails, the string is returned.
---
 .../langchain_aws/chat_models/bedrock_converse.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/libs/aws/langchain_aws/chat_models/bedrock_converse.py b/libs/aws/langchain_aws/chat_models/bedrock_converse.py
index aa378257..0c6864c0 100644
--- a/libs/aws/langchain_aws/chat_models/bedrock_converse.py
+++ b/libs/aws/langchain_aws/chat_models/bedrock_converse.py
@@ -657,7 +657,7 @@ def _anthropic_to_bedrock(
                 {
                     "toolUse": {
                         "toolUseId": block["id"],
-                        "input": block["input"],
+                        "input": _try_to_convert_to_dict(block["input"]),
                         "name": block["name"],
                     }
                 }
@@ -860,3 +860,13 @@ def _format_openai_image_url(image_url: str) -> Dict:
         "format": match.group("media_type"),
         "source": {"bytes": _b64str_to_bytes(match.group("data"))},
     }
+
+
+def _try_to_convert_to_dict(tool_use_input: Any) -> Any:
+    """Attempt to convert the toolUse.input to a dictionary."""
+    if isinstance(tool_use_input, str):
+        try:
+            return json.loads(tool_use_input)
+        except json.JSONDecodeError:
+            return tool_use_input
+    return tool_use_input
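
A quick illustration of the helper added in PATCH 3/6. This is a standalone sketch,
not part of the patch: it copies _try_to_convert_to_dict verbatim so it runs on its
own, and the sample tool inputs are made-up values.

    import json
    from typing import Any


    def _try_to_convert_to_dict(tool_use_input: Any) -> Any:
        """Attempt to convert the toolUse.input to a dictionary (copied from the patch)."""
        if isinstance(tool_use_input, str):
            try:
                return json.loads(tool_use_input)
            except json.JSONDecodeError:
                return tool_use_input
        return tool_use_input


    # Anthropic may return the tool input as a JSON string; the helper parses it.
    print(_try_to_convert_to_dict('{"setup": "cats", "punchline": "meow"}'))
    # -> {'setup': 'cats', 'punchline': 'meow'}

    # A string that is not valid JSON is returned unchanged instead of raising.
    print(_try_to_convert_to_dict("not json"))
    # -> not json

    # Inputs that are already dicts pass through untouched.
    print(_try_to_convert_to_dict({"setup": "cats"}))
    # -> {'setup': 'cats'}
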
""" - endpoint_url: Optional[str] = Field(default=None, alias="base_url") + endpoint_url: Optional[str] = Field(None, alias="base_url") """Needed if you don't want to default to us-east-1 endpoint""" config: Any = None """An optional botocore.config.Config instance to pass to the client.""" + formatted_tools: List[ + Dict[Literal["toolSpec"], Dict[str, Union[Dict[str, Any], str]]] + ] = Field(default_factory=list, exclude=True) + """"Formatted tools to be stored and used in the toolConfig parameter.""" + class Config: """Configuration for this pydantic object.""" @@ -413,7 +418,8 @@ def bind_tools( ) -> Runnable[LanguageModelInput, BaseMessage]: if tool_choice: kwargs["tool_choice"] = _format_tool_choice(tool_choice) - return self.bind(tools=_format_tools(tools), **kwargs) + self.formatted_tools = _format_tools(tools) + return self.bind(tools=self.formatted_tools, **kwargs) def with_structured_output( self, @@ -467,7 +473,7 @@ def _converse_params( } if not toolConfig and tools: toolChoice = _format_tool_choice(toolChoice) if toolChoice else None - toolConfig = {"tools": _format_tools(tools), "toolChoice": toolChoice} + toolConfig = {"tools": self.formatted_tools, "toolChoice": toolChoice} return _drop_none( { "modelId": modelId or self.model_id, From f1d9e613b67af5c524c65a645262eff17d7b40e2 Mon Sep 17 00:00:00 2001 From: Luis Rueda Date: Sat, 22 Jun 2024 11:37:55 -0500 Subject: [PATCH 5/6] Add default=None to avoid typing errors --- libs/aws/langchain_aws/chat_models/bedrock_converse.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/aws/langchain_aws/chat_models/bedrock_converse.py b/libs/aws/langchain_aws/chat_models/bedrock_converse.py index 591620f9..340dcc71 100644 --- a/libs/aws/langchain_aws/chat_models/bedrock_converse.py +++ b/libs/aws/langchain_aws/chat_models/bedrock_converse.py @@ -268,7 +268,7 @@ class Joke(BaseModel): max_tokens: Optional[int] = None """Max tokens to generate.""" - stop_sequences: Optional[List[str]] = Field(None, alias="stop") + stop_sequences: Optional[List[str]] = Field(default=None, alias="stop") """Stop generation if any of these substrings occurs.""" temperature: Optional[float] = None @@ -308,7 +308,7 @@ class Joke(BaseModel): have an ARN associated with them. """ - endpoint_url: Optional[str] = Field(None, alias="base_url") + endpoint_url: Optional[str] = Field(default=None, alias="base_url") """Needed if you don't want to default to us-east-1 endpoint""" config: Any = None From a415ff96c74e531338abb9fd7cf3519194a3a4d6 Mon Sep 17 00:00:00 2001 From: Luis Rueda Date: Mon, 24 Jun 2024 18:44:20 -0500 Subject: [PATCH 6/6] fmt --- libs/aws/langchain_aws/chat_models/bedrock_converse.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/libs/aws/langchain_aws/chat_models/bedrock_converse.py b/libs/aws/langchain_aws/chat_models/bedrock_converse.py index de03417e..16907691 100644 --- a/libs/aws/langchain_aws/chat_models/bedrock_converse.py +++ b/libs/aws/langchain_aws/chat_models/bedrock_converse.py @@ -662,14 +662,9 @@ def _anthropic_to_bedrock( bedrock_content.append( { "toolResult": { -<<<<<<< userlerueda/bedrock_converse-mod-init - "toolUseId": block["tool_use_id"], - "content": _anthropic_to_bedrock(content), -======= "toolUseId": block["toolUseId"], "content": _anthropic_to_bedrock(block["content"]), "status": "error" if block.get("isError") else "success", ->>>>>>> main } } )