Skip to content

Commit 82fa1a1

Browse files
authored
feat: Integrate AIML model platform (#1580)
1 parent a65d44e commit 82fa1a1

File tree

14 files changed

+365
-13
lines changed

14 files changed

+365
-13
lines changed

.github/workflows/build_package.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@ jobs:
8282
JINA_API_KEY: "${{ secrets.JINA_API_KEY }}"
8383
SILICONFLOW_API_KEY: "${{ secrets.SILICONFLOW_API_KEY }}"
8484
MOONSHOT_API_KEY: "${{ secrets.MOONSHOT_API_KEY }}"
85+
AIML_API_KEY: "${{ secrets.AIML_API_KEY }}"
8586
run: |
8687
source venv/bin/activate
8788
pytest --fast-test-mode ./test

.github/workflows/pytest_apps.yml

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,10 +28,6 @@ jobs:
2828
OPENAI_API_KEY: "${{ secrets.OPENAI_API_KEY }}"
2929
GOOGLE_API_KEY: "${{ secrets.GOOGLE_API_KEY }}"
3030
SEARCH_ENGINE_ID: "${{ secrets.SEARCH_ENGINE_ID }}"
31-
COHERE_API_KEY: "${{ secrets.COHERE_API_KEY }}"
32-
INTERNLM_API_KEY: "${{ secrets.INTERNLM_API_KEY }}"
33-
MOONSHOT_API_KEY: "${{ secrets.MOONSHOT_API_KEY }}"
34-
SILICONFLOW_API_KEY: "${{ secrets.SILICONFLOW_API_KEY }}"
3531
run: poetry run pytest -v apps/
3632

3733
pytest_examples:
@@ -53,4 +49,5 @@ jobs:
5349
INTERNLM_API_KEY: "${{ secrets.INTERNLM_API_KEY }}"
5450
MOONSHOT_API_KEY: "${{ secrets.MOONSHOT_API_KEY }}"
5551
SILICONFLOW_API_KEY: "${{ secrets.SILICONFLOW_API_KEY }}"
52+
AIML_API_KEY: "${{ secrets.AIML_API_KEY }}"
5653
run: poetry run pytest -v examples/

.github/workflows/pytest_package.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@ jobs:
6161
JINA_API_KEY: "${{ secrets.JINA_API_KEY }}"
6262
MOONSHOT_API_KEY: "${{ secrets.MOONSHOT_API_KEY }}"
6363
SILICONFLOW_API_KEY: "${{ secrets.SILICONFLOW_API_KEY }}"
64+
AIML_API_KEY: "${{ secrets.AIML_API_KEY }}"
6465
run: poetry run pytest --fast-test-mode test/
6566

6667
pytest_package_llm_test:
@@ -111,6 +112,7 @@ jobs:
111112
JINA_API_KEY: "${{ secrets.JINA_API_KEY }}"
112113
MOONSHOT_API_KEY: "${{ secrets.MOONSHOT_API_KEY }}"
113114
SILICONFLOW_API_KEY: "${{ secrets.SILICONFLOW_API_KEY }}"
115+
AIML_API_KEY: "${{ secrets.AIML_API_KEY }}"
114116
run: poetry run pytest --llm-test-only test/
115117

116118
pytest_package_very_slow_test:

camel/configs/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
# See the License for the specific language governing permissions and
1212
# limitations under the License.
1313
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14+
from .aiml_config import AIML_API_PARAMS, AIMLConfig
1415
from .anthropic_config import ANTHROPIC_API_PARAMS, AnthropicConfig
1516
from .base_config import BaseConfig
1617
from .cohere_config import COHERE_API_PARAMS, CohereConfig
@@ -85,4 +86,6 @@
8586
"MOONSHOT_API_PARAMS",
8687
'SiliconFlowConfig',
8788
'SILICONFLOW_API_PARAMS',
89+
'AIMLConfig',
90+
'AIML_API_PARAMS',
8891
]

camel/configs/aiml_config.py

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
2+
# Licensed under the Apache License, Version 2.0 (the "License");
3+
# you may not use this file except in compliance with the License.
4+
# You may obtain a copy of the License at
5+
#
6+
# http://www.apache.org/licenses/LICENSE-2.0
7+
#
8+
# Unless required by applicable law or agreed to in writing, software
9+
# distributed under the License is distributed on an "AS IS" BASIS,
10+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11+
# See the License for the specific language governing permissions and
12+
# limitations under the License.
13+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14+
from __future__ import annotations
15+
16+
from typing import Sequence, Type, Union
17+
18+
from pydantic import BaseModel, Field
19+
20+
from camel.configs.base_config import BaseConfig
21+
from camel.types import NOT_GIVEN, NotGiven
22+
23+
24+
class AIMLConfig(BaseConfig):
    r"""Defines the parameters for generating chat completions using the
    AIML API.

    Args:
        temperature (float, optional): Determines the degree of randomness
            in the response. (default: :obj:`0.7`)
        top_p (float, optional): The top_p (nucleus) parameter is used to
            dynamically adjust the number of choices for each predicted token
            based on the cumulative probabilities. (default: :obj:`0.7`)
        n (int, optional): Number of generations to return.
            (default: :obj:`1`)
        response_format (object, optional): An object specifying the format
            that the model must output.
        stream (bool, optional): If set, tokens are returned as Server-Sent
            Events as they are made available. (default: :obj:`False`)
        stop (str or list, optional): Up to :obj:`4` sequences where the API
            will stop generating further tokens. (default: :obj:`None`)
        max_tokens (int, optional): The maximum number of tokens to generate.
            (default: :obj:`None`)
        logit_bias (dict, optional): Modify the likelihood of specified tokens
            appearing in the completion. Accepts a json object that maps
            tokens (specified by their token ID in the tokenizer) to an
            associated bias value from :obj:`-100` to :obj:`100`.
            Mathematically, the bias is added to the logits generated by the
            model prior to sampling. The exact effect will vary per model,
            but values between :obj:`-1` and :obj:`1` should decrease or
            increase likelihood of selection; values like :obj:`-100` or
            :obj:`100` should result in a ban or exclusive selection of the
            relevant token. (default: :obj:`{}`)
        frequency_penalty (float, optional): Number between :obj:`-2.0` and
            :obj:`2.0`. Positive values penalize new tokens based on their
            existing frequency in the text so far, decreasing the model's
            likelihood to repeat the same line verbatim. See more information
            about frequency and presence penalties. (default: :obj:`0.0`)
        presence_penalty (float, optional): Number between :obj:`-2.0` and
            :obj:`2.0`. Positive values penalize new tokens based on whether
            they appear in the text so far, increasing the model's likelihood
            to talk about new topics. See more information about frequency
            and presence penalties. (default: :obj:`0.0`)
        tools (list[FunctionTool], optional): A list of tools the model may
            call. Currently, only functions are supported as a tool. Use this
            to provide a list of functions the model may generate JSON inputs
            for. A max of 128 functions are supported.
    """

    temperature: float = 0.7
    top_p: float = 0.7
    n: int = 1
    stream: bool = False
    # NOT_GIVEN sentinels mark parameters the caller never set, so they can
    # be omitted from the outgoing request instead of being sent as null.
    stop: Union[str, Sequence[str], NotGiven] = NOT_GIVEN
    max_tokens: Union[int, NotGiven] = NOT_GIVEN
    # Mutable default handled via Field(default_factory=...) to avoid a
    # shared dict across instances.
    logit_bias: dict = Field(default_factory=dict)
    response_format: Union[Type[BaseModel], dict, NotGiven] = NOT_GIVEN
    presence_penalty: float = 0.0
    frequency_penalty: float = 0.0
78+
79+
80+
# The set of request parameters accepted by the AIML backend, derived from
# the fields declared on ``AIMLConfig`` (used by check_model_config to
# reject unexpected arguments).
AIML_API_PARAMS = set(AIMLConfig.model_fields.keys())

camel/models/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
# See the License for the specific language governing permissions and
1212
# limitations under the License.
1313
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14+
from .aiml_model import AIMLModel
1415
from .anthropic_model import AnthropicModel
1516
from .azure_openai_model import AzureOpenAIModel
1617
from .base_model import BaseModelBackend
@@ -72,4 +73,5 @@
7273
'FishAudioModel',
7374
'InternLMModel',
7475
'MoonshotModel',
76+
'AIMLModel',
7577
]

camel/models/aiml_model.py

Lines changed: 147 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,147 @@
1+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
2+
# Licensed under the Apache License, Version 2.0 (the "License");
3+
# you may not use this file except in compliance with the License.
4+
# You may obtain a copy of the License at
5+
#
6+
# http://www.apache.org/licenses/LICENSE-2.0
7+
#
8+
# Unless required by applicable law or agreed to in writing, software
9+
# distributed under the License is distributed on an "AS IS" BASIS,
10+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11+
# See the License for the specific language governing permissions and
12+
# limitations under the License.
13+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
14+
import os
15+
from typing import Any, Dict, List, Optional, Union
16+
17+
from openai import OpenAI, Stream
18+
19+
from camel.configs import AIML_API_PARAMS, AIMLConfig
20+
from camel.messages import OpenAIMessage
21+
from camel.models.base_model import BaseModelBackend
22+
from camel.types import (
23+
ChatCompletion,
24+
ChatCompletionChunk,
25+
ModelType,
26+
)
27+
from camel.utils import (
28+
BaseTokenCounter,
29+
OpenAITokenCounter,
30+
api_keys_required,
31+
)
32+
33+
34+
class AIMLModel(BaseModelBackend):
    r"""AIML API in a unified BaseModelBackend interface.

    Args:
        model_type (Union[ModelType, str]): Model for which a backend is
            created.
        model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
            that will be fed into OpenAI client. If :obj:`None`,
            :obj:`AIMLConfig().as_dict()` will be used.
            (default: :obj:`None`)
        api_key (Optional[str], optional): The API key for authenticating with
            the AIML service. (default: :obj:`None`)
        url (Optional[str], optional): The URL to the AIML service. If
            not provided, :obj:`https://api.aimlapi.com/v1` will be used.
            (default: :obj:`None`)
        token_counter (Optional[BaseTokenCounter], optional): Token counter to
            use for the model. If not provided, :obj:`OpenAITokenCounter(
            ModelType.GPT_4O_MINI)` will be used.
            (default: :obj:`None`)
    """

    @api_keys_required(
        [
            ("api_key", 'AIML_API_KEY'),
        ]
    )
    def __init__(
        self,
        model_type: Union[ModelType, str],
        model_config_dict: Optional[Dict[str, Any]] = None,
        api_key: Optional[str] = None,
        url: Optional[str] = None,
        token_counter: Optional[BaseTokenCounter] = None,
    ) -> None:
        if model_config_dict is None:
            model_config_dict = AIMLConfig().as_dict()
        # Explicit arguments win; environment variables are the fallback.
        api_key = api_key or os.environ.get("AIML_API_KEY")
        url = url or os.environ.get(
            "AIML_API_BASE_URL",
            "https://api.aimlapi.com/v1",
        )
        super().__init__(
            model_type, model_config_dict, api_key, url, token_counter
        )
        # AIML exposes an OpenAI-compatible endpoint, so the stock OpenAI
        # client is reused with a custom base_url.
        self._client = OpenAI(
            timeout=180,
            max_retries=3,
            api_key=self._api_key,
            base_url=self._url,
        )

    def run(
        self,
        messages: List[OpenAIMessage],
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Runs inference of OpenAI chat completion.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat history
                in OpenAI API format.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                `ChatCompletion` in the non-stream mode, or
                `Stream[ChatCompletionChunk]` in the stream mode.
        """
        # Copy so repeated calls never mutate the shared config dict.
        model_config = self.model_config_dict.copy()

        # Drop an unset `tools` entry rather than sending `tools=[]`:
        # OpenAI-compatible endpoints require the tools array, when present,
        # to be non-empty and reject an empty list.
        if model_config.get('tools') is None:
            model_config.pop('tools', None)

        response = self._client.chat.completions.create(
            messages=messages, model=self.model_type, **model_config
        )
        return response

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            BaseTokenCounter: The token counter following the model's
                tokenization style.
        """
        # Lazily created; NOTE(review): counts with the GPT_4O_MINI
        # tokenizer, which approximates (not exactly matches) AIML-hosted
        # models' tokenization.
        if not self._token_counter:
            self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
        return self._token_counter

    def check_model_config(self):
        r"""Check whether the model configuration contains any
        unexpected arguments to AIML API.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected arguments to AIML API.
        """
        for param in self.model_config_dict:
            if param not in AIML_API_PARAMS:
                raise ValueError(
                    f"Unexpected argument `{param}` is "
                    "input into AIML model backend."
                )

    @property
    def stream(self) -> bool:
        r"""Returns whether the model is in stream mode, which sends partial
        results each time.

        Returns:
            bool: Whether the model is in stream mode.
        """
        return self.model_config_dict.get('stream', False)

camel/models/model_factory.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
1414
from typing import Dict, Optional, Type, Union
1515

16+
from camel.models.aiml_model import AIMLModel
1617
from camel.models.anthropic_model import AnthropicModel
1718
from camel.models.azure_openai_model import AzureOpenAIModel
1819
from camel.models.base_model import BaseModelBackend
@@ -104,6 +105,8 @@ def create(
104105
model_class = NvidiaModel
105106
elif model_platform.is_siliconflow:
106107
model_class = SiliconFlowModel
108+
elif model_platform.is_aiml:
109+
model_class = AIMLModel
107110

108111
elif model_platform.is_openai and model_type.is_openai:
109112
model_class = OpenAIModel

camel/models/siliconflow_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818

1919
from camel.configs import SILICONFLOW_API_PARAMS, SiliconFlowConfig
2020
from camel.messages import OpenAIMessage
21-
from camel.models import BaseModelBackend
21+
from camel.models.base_model import BaseModelBackend
2222
from camel.types import (
2323
ChatCompletion,
2424
ChatCompletionChunk,

camel/types/enums.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -204,6 +204,10 @@ class ModelType(UnifiedModelType, Enum):
204204
SILICONFLOW_THUDM_GLM_4_9B_CHAT = "THUDM/glm-4-9b-chat"
205205
SILICONFLOW_PRO_THUDM_GLM_4_9B_CHAT = "Pro/THUDM/glm-4-9b-chat"
206206

207+
# AIML models support tool calling
208+
AIML_MIXTRAL_8X7B = "mistralai/Mixtral-8x7B-Instruct-v0.1"
209+
AIML_MISTRAL_7B_INSTRUCT = "mistralai/Mistral-7B-Instruct-v0.1"
210+
207211
def __str__(self):
208212
return self.value
209213

@@ -242,6 +246,7 @@ def support_native_tool_calling(self) -> bool:
242246
self.is_moonshot,
243247
self.is_siliconflow,
244248
self.is_zhipuai,
249+
self.is_aiml,
245250
]
246251
)
247252

@@ -517,6 +522,13 @@ def is_siliconflow(self) -> bool:
517522
ModelType.SILICONFLOW_PRO_THUDM_GLM_4_9B_CHAT,
518523
}
519524

525+
    @property
    def is_aiml(self) -> bool:
        r"""Returns whether this type of models is an AIML-served model."""
        return self in {
            ModelType.AIML_MIXTRAL_8X7B,
            ModelType.AIML_MISTRAL_7B_INSTRUCT,
        }
531+
520532
@property
521533
def token_limit(self) -> int:
522534
r"""Returns the maximum token limit for a given model.
@@ -590,6 +602,8 @@ def token_limit(self) -> int:
590602
ModelType.TOGETHER_MIXTRAL_8_7B,
591603
ModelType.SGLANG_MISTRAL_7B,
592604
ModelType.MOONSHOT_V1_32K,
605+
ModelType.AIML_MIXTRAL_8X7B,
606+
ModelType.AIML_MISTRAL_7B_INSTRUCT,
593607
}:
594608
return 32_768
595609
elif self in {
@@ -868,6 +882,7 @@ class ModelPlatformType(Enum):
868882
INTERNLM = "internlm"
869883
MOONSHOT = "moonshot"
870884
SILICONFLOW = "siliconflow"
885+
AIML = "aiml"
871886

872887
@property
873888
def is_openai(self) -> bool:
@@ -985,6 +1000,11 @@ def is_siliconflow(self) -> bool:
9851000
r"""Returns whether this platform is SiliconFlow."""
9861001
return self is ModelPlatformType.SILICONFLOW
9871002

1003+
@property
1004+
def is_aiml(self) -> bool:
1005+
r"""Returns whether this platform is AIML."""
1006+
return self is ModelPlatformType.AIML
1007+
9881008

9891009
class AudioModelType(Enum):
9901010
TTS_1 = "tts-1"

docs/key_modules/models.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,6 +101,7 @@ The following table lists currently supported model platforms by CAMEL.
101101
| Together AI | https://docs.together.ai/docs/chat-models | ----- |
102102
| LiteLLM | https://docs.litellm.ai/docs/providers | ----- |
103103
| SGLang | https://sgl-project.github.io/references/supported_models.html | ----- |
104+
| AIML | https://docs.aimlapi.com/api-overview/model-database/text-models | ----- |
104105

105106
## 3. Using Models by API calling
106107

0 commit comments

Comments
 (0)