Commit 043c0c7

Merge pull request #175 from Riddhimaan-Senapati/main
Added support for deepseek LLMs
2 parents 206dfab + abe1059 commit 043c0c7

File tree

4 files changed: +127 -0 lines changed

aisuite/providers/deepseek_provider.py

+34
@@ -0,0 +1,34 @@
import openai
import os
from aisuite.provider import Provider, LLMError


class DeepseekProvider(Provider):
    def __init__(self, **config):
        """
        Initialize the DeepSeek provider with the given configuration.
        Pass the entire configuration dictionary to the OpenAI client constructor.
        """
        # Ensure the API key is provided either in config or via environment variable.
        config.setdefault("api_key", os.getenv("DEEPSEEK_API_KEY"))
        if not config["api_key"]:
            raise ValueError(
                "DeepSeek API key is missing. Please provide it in the config or set the DEEPSEEK_API_KEY environment variable."
            )
        config["base_url"] = "https://api.deepseek.com"

        # NOTE: We could drop the api_key handling above, since the OpenAI client
        # automatically infers certain values from environment variables
        # (e.g. OPENAI_API_KEY, OPENAI_ORG_ID, OPENAI_PROJECT_ID). The base_url,
        # however, has to be set explicitly to the DeepSeek endpoint.

        # Pass the entire config to the OpenAI client constructor.
        self.client = openai.OpenAI(**config)

    def chat_completions_create(self, model, messages, **kwargs):
        # Any exception raised by OpenAI is passed through to the caller.
        # We may want to catch these and raise a custom LLMError instead.
        return self.client.chat.completions.create(
            model=model,
            messages=messages,
            **kwargs,  # Pass any additional arguments to the OpenAI API
        )
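As a quick sanity check of the provider above, here is a minimal sketch of calling it directly, assuming `DEEPSEEK_API_KEY` is set in the environment; in normal use the provider is reached through the aisuite client, as the guide below shows.

```python
# Minimal sketch: exercising DeepseekProvider directly (assumes DEEPSEEK_API_KEY is set).
from aisuite.providers.deepseek_provider import DeepseekProvider

provider = DeepseekProvider()  # api_key is read from DEEPSEEK_API_KEY; base_url is fixed to DeepSeek
response = provider.chat_completions_create(
    model="deepseek-chat",
    messages=[{"role": "user", "content": "Hello!"}],
    temperature=0.7,  # extra kwargs are forwarded to the OpenAI-compatible API
)
print(response.choices[0].message.content)
```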

guides/README.md

+1
@@ -12,6 +12,7 @@ Here are the instructions for:
- [OpenAI](openai.md)
- [SambaNova](sambanova.md)
- [xAI](xai.md)
- [DeepSeek](deepseek.md)

Unless otherwise stated, these guides have not been endorsed by the providers.

guides/deepseek.md

+46
@@ -0,0 +1,46 @@
# DeepSeek

To use DeepSeek with `aisuite`, you’ll need a [DeepSeek account](https://platform.deepseek.com). After logging in, go to the [API Keys](https://platform.deepseek.com/api_keys) section in your account settings and generate a new key. Once you have your key, add it to your environment as follows:

```shell
export DEEPSEEK_API_KEY="your-deepseek-api-key"
```

## Create a Chat Completion

(Note: DeepSeek’s API is compatible with the OpenAI format, which is why the `openai` package is installed here; there is no dedicated DeepSeek library, at least not for now.)

Install the `openai` Python client:

Example with pip:
```shell
pip install openai
```

Example with poetry:
```shell
poetry add openai
```

In your code:
```python
import aisuite as ai
client = ai.Client()

provider = "deepseek"
model_id = "deepseek-chat"

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What’s the weather like in San Francisco?"},
]

response = client.chat.completions.create(
    model=f"{provider}:{model_id}",
    messages=messages,
)

print(response.choices[0].message.content)
```
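Because the provider’s `chat_completions_create` forwards extra keyword arguments to the OpenAI-compatible endpoint, request parameters such as `temperature` or `max_tokens` can be passed through the same call, assuming the aisuite client passes them along to the provider. A minimal sketch, reusing the variables from the example above (the parameter values are only illustrative):

```python
# Sketch: extra keyword arguments are forwarded to the DeepSeek endpoint unchanged.
response = client.chat.completions.create(
    model=f"{provider}:{model_id}",
    messages=messages,
    temperature=0.7,   # illustrative sampling temperature
    max_tokens=256,    # illustrative response length cap
)
print(response.choices[0].message.content)
```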
Happy coding! If you’d like to contribute, please read our [Contributing Guide](../CONTRIBUTING.md).
+46
@@ -0,0 +1,46 @@
from unittest.mock import MagicMock, patch

import pytest

from aisuite.providers.deepseek_provider import DeepseekProvider


@pytest.fixture(autouse=True)
def set_api_key_env_var(monkeypatch):
    """Fixture to set environment variables for tests."""
    monkeypatch.setenv("DEEPSEEK_API_KEY", "test-api-key")


def test_deepseek_provider():
    """High-level test that the provider is initialized and chat completions are requested successfully."""

    user_greeting = "Hello!"
    message_history = [{"role": "user", "content": user_greeting}]
    selected_model = "our-favorite-model"
    chosen_temperature = 0.75
    response_text_content = "mocked-text-response-from-model"

    provider = DeepseekProvider()
    mock_response = MagicMock()
    mock_response.choices = [MagicMock()]
    mock_response.choices[0].message = MagicMock()
    mock_response.choices[0].message.content = response_text_content

    with patch.object(
        provider.client.chat.completions,
        "create",
        return_value=mock_response,
    ) as mock_create:
        response = provider.chat_completions_create(
            messages=message_history,
            model=selected_model,
            temperature=chosen_temperature,
        )

        mock_create.assert_called_with(
            messages=message_history,
            model=selected_model,
            temperature=chosen_temperature,
        )

        assert response.choices[0].message.content == response_text_content
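The diff does not show where this test file lives on disk, so a hedged way to run just this test is to select it by keyword through pytest’s Python entry point:

```python
# Sketch: run the DeepSeek provider test by keyword (adjust "-k" if the module is named differently).
import sys

import pytest

sys.exit(pytest.main(["-k", "deepseek", "-v"]))
```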
