Commit 9c2680e

rohitprasad15 authored and rohit-rptless committed
Support for Bedrock and Anthropic
Also, code to normalize responses to the OpenAI format.
1 parent e940359 commit 9c2680e

2 files changed, +81 -4 lines changed
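
Both new providers normalize their native responses to the OpenAI chat-completion shape, so callers can read replies the same way regardless of backend. A minimal sketch of that normalized dict, with illustrative values only (the field layout follows the normalize_response implementations below):

# Illustrative shape of the dict returned by normalize_response in both providers.
normalized = {
    "choices": [
        {
            "message": {
                "role": "assistant",
                "content": "Hello! How can I help you today?",  # hypothetical reply text
            }
        }
    ]
}

# Callers read the reply the same way they would from the OpenAI client:
print(normalized["choices"][0]["message"]["content"])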
Lines changed: 39 additions & 0 deletions
@@ -0,0 +1,39 @@
import anthropic
from provider import Provider

class AnthropicProvider(Provider):
    def __init__(self, **config):
        """
        Initialize the Anthropic provider with the given configuration.
        Pass the entire configuration dictionary to the Anthropic client constructor.
        """
        self.client = anthropic.Anthropic(**config)

    def chat_completions_create(self, model, messages, **kwargs):
        # Check if the first message is a system message. Anthropic expects the
        # system prompt as a separate argument rather than as part of the messages list.
        if messages[0]["role"] == "system":
            system_message = messages[0]["content"]
            messages = messages[1:]
        else:
            system_message = None

        return self.normalize_response(self.client.messages.create(
            model=model,
            system=system_message,
            messages=messages,
            **kwargs
        ))

    def normalize_response(self, response):
        """Normalize the response from the Anthropic API to match OpenAI's response format."""
        # The Anthropic SDK returns a Message object whose content is a list of
        # content blocks, so read attributes rather than dict keys.
        return {
            "choices": [
                {
                    "message": {
                        "role": response.role,
                        "content": response.content[0].text if response.content else "",
                    }
                }
            ]
        }
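
For context, a minimal usage sketch of the new AnthropicProvider. This is not part of the commit; it assumes an ANTHROPIC_API_KEY is available in the environment, and the model id and prompts are placeholders:

# Hypothetical usage of AnthropicProvider (not part of this commit).
provider = AnthropicProvider()  # anthropic.Anthropic() picks up ANTHROPIC_API_KEY from the environment

response = provider.chat_completions_create(
    model="claude-3-haiku-20240307",  # placeholder model id
    messages=[
        {"role": "system", "content": "You are a concise assistant."},  # pulled out and sent as `system`
        {"role": "user", "content": "Say hello."},
    ],
    max_tokens=100,  # forwarded to the Anthropic Messages API via **kwargs (required by that API)
)
print(response["choices"][0]["message"]["content"])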
Lines changed: 42 additions & 4 deletions
@@ -1,4 +1,4 @@
-from anthropic import AnthropicBedrock
+import boto3
 from provider import Provider, LLMError

 class AWSBedrockProvider(Provider):
@@ -10,13 +10,51 @@ def __init__(self, **config):
         # Anthropic Bedrock client will use the default AWS credential providers, such as
         # using ~/.aws/credentials or the "AWS_SECRET_ACCESS_KEY" and "AWS_ACCESS_KEY_ID" environment variables.
         # Any overrides from the user are passed to the constructor.
-        self.client = AnthropicBedrock(**config)
+        self.client = boto3.client("bedrock-runtime", **config)
+        # Maintain a list of Inference Parameters which Bedrock supports.
+        # https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_InferenceConfiguration.html
+        self.inference_parameters = ['maxTokens', 'temperature', 'topP', 'stopSequences']
+
+    def normalize_response(self, response):
+        """Normalize the response from the Bedrock API to match OpenAI's response format."""
+        # The Converse API returns message content as a list of content blocks,
+        # so pull the text out of the first block.
+        return {
+            "choices": [
+                {
+                    "message": {
+                        "content": (
+                            response["output"]["message"]["content"][0]["text"]
+                            if response["output"].get("message")
+                            else ""
+                        ),
+                        "role": "assistant"
+                    },
+                }
+            ]
+        }

     def chat_completions_create(self, model, messages, **kwargs):
         # Any exception raised by Bedrock will be returned to the caller.
         # Maybe we should catch them and raise a custom LLMError.
-        return self.client.messages.create(
-            model=model,
-            messages=messages,
-            **kwargs # Pass any additional arguments to the Anthropic API. Eg: max_tokens.
-        )
+        system_message = None
+        if messages[0]["role"] == "system":
+            # The Converse API expects the system prompt as a list of content blocks.
+            system_message = [{"text": messages[0]["content"]}]
+            messages = messages[1:]
+
+        # The Converse API also expects each message's content as a list of content blocks.
+        formatted_messages = [
+            {"role": message["role"], "content": [{"text": message["content"]}]}
+            for message in messages
+        ]
+
+        # Inference parameters supported by Bedrock must be passed via inferenceConfig;
+        # all other fields are passed as additionalModelRequestFields.
+        inference_config = {}
+        additional_model_request_fields = {}
+
+        # Iterate over the kwargs and separate the inference parameters from the
+        # additional model request fields.
+        for key, value in kwargs.items():
+            if key in self.inference_parameters:
+                inference_config[key] = value
+            else:
+                additional_model_request_fields[key] = value
+
+        # Call the Bedrock Converse API. The model is passed as modelId, and the
+        # system prompt is only included when one was provided.
+        request = {
+            "modelId": model,
+            "messages": formatted_messages,
+            "inferenceConfig": inference_config,
+            "additionalModelRequestFields": additional_model_request_fields,
+        }
+        if system_message is not None:
+            request["system"] = system_message
+
+        response = self.client.converse(**request)
+        return self.normalize_response(response)
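
And a corresponding usage sketch for the updated AWSBedrockProvider. Again not part of the commit; it assumes AWS credentials are configured as described in the comments above, and the region, model id, and parameter values are placeholders. It illustrates how kwargs are split between inferenceConfig and additionalModelRequestFields:

# Hypothetical usage of AWSBedrockProvider (not part of this commit).
provider = AWSBedrockProvider(region_name="us-east-1")  # placeholder region; boto3 overrides go here

response = provider.chat_completions_create(
    model="anthropic.claude-3-haiku-20240307-v1:0",  # placeholder Bedrock model id
    messages=[
        {"role": "system", "content": "You are a concise assistant."},  # sent via `system`
        {"role": "user", "content": "Say hello."},
    ],
    maxTokens=100,    # in inference_parameters -> goes into inferenceConfig
    temperature=0.2,  # in inference_parameters -> goes into inferenceConfig
    top_k=40,         # not in the list -> goes into additionalModelRequestFields
)
print(response["choices"][0]["message"]["content"])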
