Skip to content

Commit 70135bf

Browse files
committed
fixes for bedrock invoke
1 parent b4e4393 commit 70135bf

File tree

2 files changed

+51
-18
lines changed


litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py

+36-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
LiteLLMLoggingObj = Any
2424

2525

26-
class AmazonAnthropicClaude3Config(AmazonInvokeConfig, AnthropicConfig):
26+
class AmazonAnthropicClaude3Config(AmazonInvokeConfig):
2727
"""
2828
Reference:
2929
https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=claude
@@ -34,6 +34,41 @@ class AmazonAnthropicClaude3Config(AmazonInvokeConfig, AnthropicConfig):
3434

3535
anthropic_version: str = "bedrock-2023-05-31"
3636

37+
def get_supported_openai_params(self, model: str):
    """
    Return the OpenAI-compatible parameter names accepted by Anthropic
    Claude 3 models on the Bedrock invoke route.

    ``model`` is part of the config interface but does not affect the
    result here — all Claude 3 invoke models share one parameter set.
    """
    supported = (
        "max_tokens",
        "max_completion_tokens",
        "tools",
        "tool_choice",
        "stream",
        "stop",
        "temperature",
        "top_p",
        "extra_headers",
    )
    return list(supported)
49+
50+
def map_openai_params(
    self,
    non_default_params: dict,
    optional_params: dict,
    model: str,
    drop_params: bool,
):
    """
    Translate OpenAI-style request parameters into their Anthropic
    invoke-API equivalents, merging them into ``optional_params``.

    Args:
        non_default_params: OpenAI params the caller explicitly set.
        optional_params: provider params dict, mutated in place.
        model: model id (unused here; kept for interface parity).
        drop_params: unsupported-param policy flag (unused here; kept
            for interface parity with sibling configs).

    Returns:
        The (mutated) ``optional_params`` dict.
    """
    for param, value in non_default_params.items():
        if param in ("max_tokens", "max_completion_tokens"):
            optional_params["max_tokens"] = value
        elif param == "tools":
            optional_params["tools"] = value
        elif param == "tool_choice":
            # FIX: "tool_choice" is advertised in
            # get_supported_openai_params but was previously never
            # mapped, so it was silently dropped. Translate the OpenAI
            # shape into Anthropic's tool_choice object.
            if value == "auto":
                optional_params["tool_choice"] = {"type": "auto"}
            elif value == "required":
                optional_params["tool_choice"] = {"type": "any"}
            elif isinstance(value, dict):
                # OpenAI: {"type": "function", "function": {"name": ...}}
                optional_params["tool_choice"] = {
                    "type": "tool",
                    "name": value.get("function", {}).get("name", ""),
                }
            # "none" has no direct Anthropic equivalent on this route;
            # omit it so the model decides whether to call a tool.
        elif param == "stream":
            optional_params["stream"] = value
        elif param == "stop":
            # Anthropic takes a list of stop sequences; OpenAI also
            # allows a single bare string — normalize it.
            optional_params["stop_sequences"] = (
                [value] if isinstance(value, str) else value
            )
        elif param == "temperature":
            optional_params["temperature"] = value
        elif param == "top_p":
            optional_params["top_p"] = value
    return optional_params
71+
3772
def transform_request(
3873
self,
3974
model: str,

litellm/utils.py

+15-17
Original file line numberDiff line numberDiff line change
@@ -3189,8 +3189,8 @@ def _check_valid_arg(supported_params: List[str]):
31893189
),
31903190
)
31913191
elif custom_llm_provider == "bedrock":
3192-
base_model = BedrockModelInfo.get_base_model(model)
3193-
if base_model in litellm.bedrock_converse_models:
3192+
bedrock_route = BedrockModelInfo.get_bedrock_route(model)
3193+
if bedrock_route == "converse" or bedrock_route == "converse_like":
31943194
optional_params = litellm.AmazonConverseConfig().map_openai_params(
31953195
model=model,
31963196
non_default_params=non_default_params,
@@ -3203,22 +3203,20 @@ def _check_valid_arg(supported_params: List[str]):
32033203
messages=messages,
32043204
)
32053205

3206-
elif "anthropic" in model:
3207-
if "aws_bedrock_client" in passed_params: # deprecated boto3.invoke route.
3208-
if model.startswith("anthropic.claude-3"):
3209-
optional_params = (
3210-
litellm.AmazonAnthropicClaude3Config().map_openai_params(
3211-
non_default_params=non_default_params,
3212-
optional_params=optional_params,
3213-
model=model,
3214-
drop_params=(
3215-
drop_params
3216-
if drop_params is not None
3217-
and isinstance(drop_params, bool)
3218-
else False
3219-
),
3220-
)
3206+
elif "anthropic" in model and bedrock_route == "invoke":
3207+
if model.startswith("anthropic.claude-3"):
3208+
optional_params = (
3209+
litellm.AmazonAnthropicClaude3Config().map_openai_params(
3210+
non_default_params=non_default_params,
3211+
optional_params=optional_params,
3212+
model=model,
3213+
drop_params=(
3214+
drop_params
3215+
if drop_params is not None and isinstance(drop_params, bool)
3216+
else False
3217+
),
32213218
)
3219+
)
32223220
else:
32233221
optional_params = litellm.AmazonAnthropicConfig().map_openai_params(
32243222
non_default_params=non_default_params,

0 commit comments

Comments (0)