Skip to content

Commit 221b04b

Browse files
committed
added providers fireworks and replicate with test code and outputs in multi_fm_client.ipynb
1 parent 43f8a8a commit 221b04b

File tree

5 files changed

+202
-52
lines changed

5 files changed

+202
-52
lines changed

aimodels/client/multi_fm_client.py

+4
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,8 @@
77
MistralInterface,
88
OllamaInterface,
99
OpenAIInterface,
10+
FireworksInterface,
11+
ReplicateInterface,
1012
)
1113

1214

@@ -38,6 +40,8 @@ def __init__(self):
3840
"mistral": MistralInterface,
3941
"ollama": OllamaInterface,
4042
"openai": OpenAIInterface,
43+
"fireworks": FireworksInterface,
44+
"replicate": ReplicateInterface,
4145
}
4246

4347
def get_provider_interface(self, model):

aimodels/providers/__init__.py

+2
Original file line numberDiff line numberDiff line change
@@ -5,3 +5,5 @@
55
from .mistral_interface import MistralInterface
66
from .ollama_interface import OllamaInterface
77
from .openai_interface import OpenAIInterface
8+
from .fireworks_interface import FireworksInterface
9+
from .replicate_interface import ReplicateInterface
+34
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
"""The interface to the Fireworks API."""
2+
3+
import os
4+
5+
from ..framework.provider_interface import ProviderInterface
6+
7+
class FireworksInterface(ProviderInterface):
    """Fireworks implementation of the ProviderInterface contract."""

    def __init__(self):
        """Initialize the Fireworks SDK client.

        The API key is read from the ``FIREWORKS_API_KEY`` environment
        variable.
        """
        # Deferred import: the fireworks-ai package is only required when
        # this provider is actually instantiated.
        from fireworks.client import Fireworks

        self.fireworks_client = Fireworks(api_key=os.getenv("FIREWORKS_API_KEY"))

    def chat_completion_create(self, messages=None, model=None, temperature=0):
        """Request chat completions from the Fireworks API.

        Args:
        ----
            messages (list of dict): A list of message objects in chat history.
            model (str): Identifies the specific provider/model to use.
            temperature (float): The temperature to use in the completion.

        Returns:
        -------
            The API response with the completion result.

        """
        client = self.fireworks_client
        return client.chat.completions.create(
            model=model, messages=messages, temperature=temperature
        )
+34
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
"""The interface to the Replicate API."""
2+
3+
import os
4+
5+
from ..framework.provider_interface import ProviderInterface
6+
7+
class ReplicateInterface(ProviderInterface):
    """Replicate implementation of the ProviderInterface contract."""

    def __init__(self):
        """Initialize an OpenAI-compatible client pointed at Replicate.

        Replicate exposes an OpenAI-compatible proxy, so the openai SDK is
        reused here. The API key is read from the ``REPLICATE_API_KEY``
        environment variable.
        """
        # Deferred import: the openai package is only required when this
        # provider is actually instantiated.
        from openai import OpenAI

        self.replicate_client = OpenAI(
            api_key=os.getenv("REPLICATE_API_KEY"),
            base_url="https://openai-proxy.replicate.com/v1",
        )

    def chat_completion_create(self, messages=None, model=None, temperature=0):
        """Request chat completions from the Replicate API.

        Args:
        ----
            messages (list of dict): A list of message objects in chat history.
            model (str): Identifies the specific provider/model to use.
            temperature (float): The temperature to use in the completion.

        Returns:
        -------
            The API response with the completion result.

        """
        client = self.replicate_client
        return client.chat.completions.create(
            model=model, messages=messages, temperature=temperature
        )

examples/multi_fm_client.ipynb

+128-52
Original file line numberDiff line numberDiff line change
@@ -1,35 +1,27 @@
11
{
22
"cells": [
33
{
4-
"cell_type": "markdown",
5-
"id": "60c7fb39",
4+
"cell_type": "raw",
5+
"id": "16c03c35-b679-43d4-971b-4ce19e619d51",
66
"metadata": {},
77
"source": [
88
"# MultiFMClient\n",
99
"\n",
10-
"MultiFMClient provides a uniform interface for interacting with LLMs from various providers. It adapts the official python libraries from providers such as Mistral, OpenAI, Meta, Anthropic, etc. to conform to the OpenAI chat completion interface.\n",
10+
"MultiFMClient provides a uniform interface for interacting with LLMs from various providers. It adapts the official python libraries from providers such as Mistral, OpenAI, Groq, Anthropic, Fireworks, Replicate, etc. to conform to the OpenAI chat completion interface.\n",
1111
"\n",
1212
"Below are some examples of how to use MultiFMClient to interact with different LLMs."
1313
]
1414
},
1515
{
1616
"cell_type": "code",
17+
"execution_count": 1,
1718
"id": "initial_id",
1819
"metadata": {
19-
"collapsed": true,
2020
"ExecuteTime": {
2121
"end_time": "2024-07-04T15:30:02.064319Z",
2222
"start_time": "2024-07-04T15:30:02.051986Z"
2323
}
2424
},
25-
"source": [
26-
"import sys\n",
27-
"sys.path.append('../aimodels')\n",
28-
"\n",
29-
"from dotenv import load_dotenv, find_dotenv\n",
30-
"\n",
31-
"load_dotenv(find_dotenv())"
32-
],
3325
"outputs": [
3426
{
3527
"data": {
@@ -42,17 +34,40 @@
4234
"output_type": "execute_result"
4335
}
4436
],
45-
"execution_count": 1
37+
"source": [
38+
"import sys\n",
39+
"sys.path.append('../aimodels')\n",
40+
"\n",
41+
"from dotenv import load_dotenv, find_dotenv\n",
42+
"\n",
43+
"load_dotenv(find_dotenv())"
44+
]
4645
},
4746
{
4847
"cell_type": "code",
48+
"execution_count": 4,
49+
"id": "a54491b7-6aa9-4337-9aba-3a0aef263bb4",
50+
"metadata": {},
51+
"outputs": [],
52+
"source": [
53+
"import os \n",
54+
"\n",
55+
"os.environ['GROQ_API_KEY'] = 'xxx' # get a free key at https://console.groq.com/keys\n",
56+
"os.environ['FIREWORKS_API_KEY'] = 'xxx' # get a free key at https://fireworks.ai/api-keys\n",
57+
"os.environ['REPLICATE_API_KEY'] = 'xxx' # get a free key at https://replicate.com/account/api-tokens"
58+
]
59+
},
60+
{
61+
"cell_type": "code",
62+
"execution_count": 8,
4963
"id": "4de3a24f",
5064
"metadata": {
5165
"ExecuteTime": {
5266
"end_time": "2024-07-04T15:31:12.914321Z",
5367
"start_time": "2024-07-04T15:31:12.796445Z"
5468
}
5569
},
70+
"outputs": [],
5671
"source": [
5772
"from aimodels.client import MultiFMClient\n",
5873
"\n",
@@ -62,59 +77,128 @@
6277
" {\"role\": \"system\", \"content\": \"Respond in Pirate English.\"},\n",
6378
" {\"role\": \"user\", \"content\": \"Tell me a joke\"},\n",
6479
"]"
65-
],
80+
]
81+
},
82+
{
83+
"cell_type": "code",
84+
"execution_count": null,
85+
"id": "668a6cfa-9011-480a-ae1b-6dbd6a51e716",
86+
"metadata": {},
6687
"outputs": [],
67-
"execution_count": 3
88+
"source": [
89+
"# !pip install fireworks-ai"
90+
]
6891
},
6992
{
7093
"cell_type": "code",
71-
"id": "adebd2f0b578a909",
72-
"metadata": {
73-
"ExecuteTime": {
74-
"end_time": "2024-07-04T15:31:25.060689Z",
75-
"start_time": "2024-07-04T15:31:16.131205Z"
94+
"execution_count": 13,
95+
"id": "9900fdf3-a113-40fd-b42f-0e6d866838be",
96+
"metadata": {},
97+
"outputs": [
98+
{
99+
"name": "stdout",
100+
"output_type": "stream",
101+
"text": [
102+
"Arrrr, listen close me hearty! Here be a joke fer ye:\n",
103+
"\n",
104+
"Why did the pirate quit his job?\n",
105+
"\n",
106+
"Because he was sick o' all the arrrr-guments! (get it? arguments, but with an \"arrr\" like a pirate says? aye, I thought it be a good one, matey!)\n"
107+
]
76108
}
77-
},
109+
],
78110
"source": [
79-
"anthropic_claude_3_opus = \"anthropic:claude-3-opus-20240229\"\n",
111+
"fireworks_llama3_8b = \"fireworks:accounts/fireworks/models/llama-v3-8b-instruct\"\n",
112+
"#fireworks_llama3_70b = \"fireworks:accounts/fireworks/models/llama-v3-70b-instruct\"\n",
80113
"\n",
81-
"response = client.chat.completions.create(model=anthropic_claude_3_opus, messages=messages)\n",
114+
"response = client.chat.completions.create(model=fireworks_llama3_8b, messages=messages)\n",
82115
"\n",
83116
"print(response.choices[0].message.content)"
84-
],
117+
]
118+
},
119+
{
120+
"cell_type": "code",
121+
"execution_count": 11,
122+
"id": "c9b2aad6-8603-4227-9566-778f714eb0b5",
123+
"metadata": {},
85124
"outputs": [
86125
{
87126
"name": "stdout",
88127
"output_type": "stream",
89128
"text": [
90-
"Arrr, me bucko, 'ere be a jolly jest fer ye!\n",
129+
"Arrrr, listen close me hearty! Here be a joke fer ye:\n",
130+
"\n",
131+
"Why did the pirate quit his job?\n",
91132
"\n",
92-
"What did th' pirate say on 'is 80th birthday? \"Aye matey!\"\n",
133+
"Because he were sick o' all the arrrr-guments! (get it? arguments, but with arrrr, like a pirate says \"arrgh\"! ahhahahah!)\n",
93134
"\n",
94-
"Ye see, it be a play on words, as \"Aye matey\" sounds like \"I'm eighty\". Har har har! 'Tis a clever bit o' pirate humor, if I do say so meself. Now, 'ow about ye fetch me a mug o' grog while I spin ye another yarn?\n"
135+
"Yer turn, matey! Got a joke to share?\n"
95136
]
96137
}
97138
],
98-
"execution_count": 4
139+
"source": [
140+
"groq_llama3_8b = \"groq:llama3-8b-8192\"\n",
141+
"# groq_llama3_70b = \"groq:llama3-70b-8192\"\n",
142+
"\n",
143+
"response = client.chat.completions.create(model=groq_llama3_8b, messages=messages)\n",
144+
"\n",
145+
"print(response.choices[0].message.content)"
146+
]
99147
},
100148
{
101149
"cell_type": "code",
102-
"execution_count": 4,
103-
"id": "6819ac17",
150+
"execution_count": 12,
151+
"id": "6baf88b8-2ecb-4bdf-9263-4af949668d16",
104152
"metadata": {},
105153
"outputs": [
106154
{
107155
"name": "stdout",
108156
"output_type": "stream",
109157
"text": [
110-
"Arrrr, here be a joke fer ye!\n",
158+
"Arrrr, listen close me hearty! Here be a joke fer ye:\n",
159+
"\n",
160+
"Why did the pirate quit his job?\n",
111161
"\n",
112-
"Why did the pirate take a parrot on his ship?\n",
162+
"Because he were sick o' all the arrrr-guments! (get it? arguments, but with arrrr, like a pirate says \"arrgh\"! ahhahahah!)\n",
113163
"\n",
114-
"Because it were a hootin' good bird to have around, savvy? Aye, and it kept 'im company while he were swabbin' the decks! Arrrgh, I hope that made ye laugh, matey!\n"
164+
"Yer turn, matey! Got a joke to share?\n"
115165
]
116166
}
117167
],
168+
"source": [
169+
"replicate_llama3_8b = \"replicate:meta/meta-llama-3-8b-instruct\"\n",
170+
"#replicate_llama3_70b = \"replicate:meta/meta-llama-3-70b-instruct\"\n",
171+
"\n",
172+
"response = client.chat.completions.create(model=replicate_llama3_8b, messages=messages)\n",
173+
"\n",
174+
"print(response.choices[0].message.content)"
175+
]
176+
},
177+
{
178+
"cell_type": "code",
179+
"execution_count": null,
180+
"id": "adebd2f0b578a909",
181+
"metadata": {
182+
"ExecuteTime": {
183+
"end_time": "2024-07-04T15:31:25.060689Z",
184+
"start_time": "2024-07-04T15:31:16.131205Z"
185+
}
186+
},
187+
"outputs": [],
188+
"source": [
189+
"anthropic_claude_3_opus = \"anthropic:claude-3-opus-20240229\"\n",
190+
"\n",
191+
"response = client.chat.completions.create(model=anthropic_claude_3_opus, messages=messages)\n",
192+
"\n",
193+
"print(response.choices[0].message.content)"
194+
]
195+
},
196+
{
197+
"cell_type": "code",
198+
"execution_count": null,
199+
"id": "6819ac17",
200+
"metadata": {},
201+
"outputs": [],
118202
"source": [
119203
"ollama_llama3 = \"ollama:llama3\"\n",
120204
"\n",
@@ -124,44 +208,36 @@
124208
]
125209
},
126210
{
211+
"cell_type": "code",
212+
"execution_count": null,
213+
"id": "4a94961b2bddedbb",
127214
"metadata": {
128215
"ExecuteTime": {
129216
"end_time": "2024-07-04T15:31:39.472675Z",
130217
"start_time": "2024-07-04T15:31:38.283368Z"
131218
}
132219
},
133-
"cell_type": "code",
220+
"outputs": [],
134221
"source": [
135222
"mistral_7b = \"mistral:open-mistral-7b\"\n",
136223
"\n",
137224
"response = client.chat.completions.create(model=mistral_7b, messages=messages, temperature=0.2)\n",
138225
"\n",
139226
"print(response.choices[0].message.content)"
140-
],
141-
"id": "4a94961b2bddedbb",
142-
"outputs": [
143-
{
144-
"name": "stdout",
145-
"output_type": "stream",
146-
"text": [
147-
"Arr matey, I've got a jest fer ye, if ye be ready for a laugh! Why did the pirate bring a clock to the island? Because he wanted to catch the time! Aye, that be a good one, I be thinkin'. Arrr!\n"
148-
]
149-
}
150-
],
151-
"execution_count": 5
227+
]
152228
},
153229
{
154-
"metadata": {},
155230
"cell_type": "code",
156-
"outputs": [],
157231
"execution_count": null,
158-
"source": "",
159-
"id": "611210a4dc92845f"
232+
"id": "611210a4dc92845f",
233+
"metadata": {},
234+
"outputs": [],
235+
"source": []
160236
}
161237
],
162238
"metadata": {
163239
"kernelspec": {
164-
"display_name": "Python 3",
240+
"display_name": "Python 3 (ipykernel)",
165241
"language": "python",
166242
"name": "python3"
167243
},
@@ -175,7 +251,7 @@
175251
"name": "python",
176252
"nbconvert_exporter": "python",
177253
"pygments_lexer": "ipython3",
178-
"version": "3.12.3"
254+
"version": "3.10.14"
179255
}
180256
},
181257
"nbformat": 4,

0 commit comments

Comments
 (0)