camel-ai 0.1.5.2__py3-none-any.whl → 0.1.5.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of camel-ai might be problematic.
- camel/agents/chat_agent.py +21 -17
- camel/agents/critic_agent.py +6 -9
- camel/agents/deductive_reasoner_agent.py +7 -9
- camel/agents/embodied_agent.py +6 -9
- camel/agents/knowledge_graph_agent.py +12 -10
- camel/agents/role_assignment_agent.py +10 -11
- camel/agents/search_agent.py +5 -5
- camel/agents/task_agent.py +26 -38
- camel/configs/openai_config.py +14 -0
- camel/embeddings/base.py +10 -9
- camel/embeddings/openai_embedding.py +25 -12
- camel/embeddings/sentence_transformers_embeddings.py +28 -14
- camel/functions/open_api_function.py +11 -4
- camel/functions/slack_functions.py +14 -2
- camel/models/__init__.py +4 -0
- camel/models/anthropic_model.py +4 -2
- camel/models/base_model.py +4 -1
- camel/models/model_factory.py +42 -21
- camel/models/nemotron_model.py +71 -0
- camel/models/ollama_model.py +121 -0
- camel/models/open_source_model.py +7 -2
- camel/models/openai_model.py +8 -3
- camel/models/stub_model.py +3 -1
- camel/prompts/__init__.py +4 -0
- camel/prompts/generate_text_embedding_data.py +79 -0
- camel/prompts/task_prompt_template.py +4 -0
- camel/retrievers/auto_retriever.py +2 -2
- camel/societies/role_playing.py +16 -19
- camel/storages/graph_storages/graph_element.py +9 -1
- camel/types/__init__.py +2 -0
- camel/types/enums.py +84 -22
- camel/utils/commons.py +4 -0
- camel/utils/token_counting.py +5 -3
- {camel_ai-0.1.5.2.dist-info → camel_ai-0.1.5.4.dist-info}/METADATA +60 -47
- {camel_ai-0.1.5.2.dist-info → camel_ai-0.1.5.4.dist-info}/RECORD +36 -36
- camel/bots/__init__.py +0 -20
- camel/bots/discord_bot.py +0 -103
- camel/bots/telegram_bot.py +0 -84
- {camel_ai-0.1.5.2.dist-info → camel_ai-0.1.5.4.dist-info}/WHEEL +0 -0
camel/embeddings/openai_embedding.py
CHANGED

@@ -11,10 +11,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+from __future__ import annotations
+
 import os
-from typing import Any
+from typing import Any
 
-from openai import OpenAI
+from openai import NOT_GIVEN, NotGiven, OpenAI
 
 from camel.embeddings.base import BaseEmbedding
 from camel.types import EmbeddingModelType

@@ -25,10 +27,13 @@ class OpenAIEmbedding(BaseEmbedding[str]):
     r"""Provides text embedding functionalities using OpenAI's models.
 
     Args:
-
-
-
+        model_type (EmbeddingModelType, optional): The model type to be
+            used for text embeddings.
+            (default: :obj:`TEXT_EMBEDDING_3_SMALL`)
+        api_key (str, optional): The API key for authenticating with the
             OpenAI service. (default: :obj:`None`)
+        dimensions (int, optional): The text embedding output dimensions.
+            (default: :obj:`NOT_GIVEN`)
 
     Raises:
         RuntimeError: If an unsupported model type is specified.

@@ -36,36 +41,44 @@ class OpenAIEmbedding(BaseEmbedding[str]):
 
     def __init__(
         self,
-        model_type: EmbeddingModelType =
-
+        model_type: EmbeddingModelType = (
+            EmbeddingModelType.TEXT_EMBEDDING_3_SMALL
+        ),
+        api_key: str | None = None,
+        dimensions: int | NotGiven = NOT_GIVEN,
     ) -> None:
         if not model_type.is_openai:
             raise ValueError("Invalid OpenAI embedding model type.")
         self.model_type = model_type
-
+        if dimensions == NOT_GIVEN:
+            self.output_dim = model_type.output_dim
+        else:
+            assert isinstance(dimensions, int)
+            self.output_dim = dimensions
         self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
         self.client = OpenAI(timeout=60, max_retries=3, api_key=self._api_key)
 
     @model_api_key_required
     def embed_list(
         self,
-        objs:
+        objs: list[str],
         **kwargs: Any,
-    ) ->
+    ) -> list[list[float]]:
         r"""Generates embeddings for the given texts.
 
         Args:
-            objs (
+            objs (list[str]): The texts for which to generate the embeddings.
            **kwargs (Any): Extra kwargs passed to the embedding API.
 
         Returns:
-
+            list[list[float]]: A list that represents the generated embedding
                as a list of floating-point numbers.
        """
        # TODO: count tokens
        response = self.client.embeddings.create(
            input=objs,
            model=self.model_type.value,
+            dimensions=self.output_dim,
            **kwargs,
        )
        return [data.embedding for data in response.data]
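In practice, the new `dimensions` argument lets callers request shortened `text-embedding-3-*` vectors. A minimal usage sketch (assuming camel-ai 0.1.5.4, `OPENAI_API_KEY` set in the environment, and the usual package-level re-export of `OpenAIEmbedding`):

from camel.embeddings import OpenAIEmbedding
from camel.types import EmbeddingModelType

embedding = OpenAIEmbedding(
    model_type=EmbeddingModelType.TEXT_EMBEDDING_3_SMALL,
    dimensions=256,  # new in 0.1.5.4; omit to keep the model's native size
)
vectors = embedding.embed_list(["hello", "world"])
assert len(vectors[0]) == embedding.output_dim  # 256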
camel/embeddings/sentence_transformers_embeddings.py
CHANGED

@@ -11,51 +11,63 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
-from
+from __future__ import annotations
+
+from typing import Any
+
+from numpy import ndarray
 
 from camel.embeddings.base import BaseEmbedding
 
 
 class SentenceTransformerEncoder(BaseEmbedding[str]):
-    r"""This class provides functionalities to generate
-    using
+    r"""This class provides functionalities to generate text
+    embeddings using `Sentence Transformers`.
 
     References:
         https://www.sbert.net/
     """
 
-    def __init__(
+    def __init__(
+        self,
+        model_name: str = "intfloat/e5-large-v2",
+        **kwargs,
+    ):
         r"""Initializes the: obj: `SentenceTransformerEmbedding` class
         with the specified transformer model.
 
         Args:
             model_name (str, optional): The name of the model to use.
-
+                (default: :obj:`intfloat/e5-large-v2`)
+            **kwargs (optional): Additional arguments of
+                :class:`SentenceTransformer`, such as :obj:`prompts` etc.
         """
         from sentence_transformers import SentenceTransformer
 
-        self.model = SentenceTransformer(model_name)
+        self.model = SentenceTransformer(model_name, **kwargs)
 
     def embed_list(
         self,
-        objs:
+        objs: list[str],
         **kwargs: Any,
-    ) ->
+    ) -> list[list[float]]:
         r"""Generates embeddings for the given texts using the model.
 
         Args:
-            objs (
-
+            objs (list[str]): The texts for which to generate the
+                embeddings.
 
         Returns:
-
+            list[list[float]]: A list that represents the generated embedding
                 as a list of floating-point numbers.
         """
         if not objs:
             raise ValueError("Input text list is empty")
-
+        embeddings = self.model.encode(
             objs, normalize_embeddings=True, **kwargs
-        )
+        )
+        assert isinstance(embeddings, ndarray)
+        return embeddings.tolist()
 
     def get_output_dim(self) -> int:
         r"""Returns the output dimension of the embeddings.

@@ -63,4 +75,6 @@ class SentenceTransformerEncoder(BaseEmbedding[str]):
         Returns:
             int: The dimensionality of the embeddings.
         """
-
+        output_dim = self.model.get_sentence_embedding_dimension()
+        assert isinstance(output_dim, int)
+        return output_dim
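The constructor now forwards `**kwargs` straight to `SentenceTransformer`, so any of its initializer options can be passed through. A sketch (assumes `sentence-transformers` is installed; `device` is a standard `SentenceTransformer` argument):

from camel.embeddings import SentenceTransformerEncoder

encoder = SentenceTransformerEncoder(
    model_name="intfloat/e5-large-v2",
    device="cpu",  # forwarded via **kwargs since 0.1.5.4
)
vectors = encoder.embed_list(["query: what is CAMEL?"])
assert len(vectors[0]) == encoder.get_output_dim()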
camel/functions/open_api_function.py
CHANGED

@@ -13,16 +13,15 @@
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
 import json
 import os
-from typing import Any, Callable, Dict, List, Tuple
+from typing import Any, Callable, Dict, List, Optional, Tuple
 
-import prance
 import requests
 
 from camel.functions import OpenAIFunction, openapi_security_config
 from camel.types import OpenAPIName
 
 
-def parse_openapi_file(openapi_spec_path: str) -> Dict[str, Any]:
+def parse_openapi_file(openapi_spec_path: str) -> Optional[Dict[str, Any]]:
     r"""Load and parse an OpenAPI specification file.
 
     This function utilizes the `prance.ResolvingParser` to parse and resolve

@@ -34,8 +33,14 @@ def parse_openapi_file(openapi_spec_path: str) -> Dict[str, Any]:
         specification.
 
     Returns:
-        Dict[str, Any]: The parsed OpenAPI specification
+        Optional[Dict[str, Any]]: The parsed OpenAPI specification
+            as a dictionary. :obj:`None` if the package is not installed.
     """
+    try:
+        import prance
+    except Exception:
+        return None
+
     # Load the OpenAPI spec
     parser = prance.ResolvingParser(
         openapi_spec_path, backend="openapi-spec-validator", strict=False

@@ -451,6 +456,8 @@ def apinames_filepaths_to_funs_schemas(
         )
 
         openapi_spec = parse_openapi_file(file_path)
+        if openapi_spec is None:
+            return [], []
 
         # Generate and merge function schemas
         openapi_functions_schemas = openapi_spec_to_openai_schemas(
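With `prance` demoted to an optional, lazily imported dependency, callers should now handle a `None` result. A sketch (the spec path is illustrative):

from camel.functions.open_api_function import parse_openapi_file

spec = parse_openapi_file("specs/petstore.yaml")
if spec is None:
    # prance is not installed; 0.1.5.4 degrades instead of failing at import
    print("Install `prance` to enable the OpenAPI toolkit.")
else:
    print(spec["info"]["title"])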
camel/functions/slack_functions.py
CHANGED

@@ -24,8 +24,6 @@ if TYPE_CHECKING:
 
     from slack_sdk import WebClient
 
-    from slack_sdk.errors import SlackApiError
-
 from camel.functions import OpenAIFunction
 
 logger = logging.getLogger(__name__)

@@ -89,6 +87,8 @@ def create_slack_channel(name: str, is_private: Optional[bool] = True) -> str:
         SlackApiError: If there is an error during get slack channel
             information.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         response = slack_client.conversations_create(

@@ -115,6 +115,8 @@ def join_slack_channel(channel_id: str) -> str:
         SlackApiError: If there is an error during get slack channel
             information.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         response = slack_client.conversations_join(channel=channel_id)

@@ -137,6 +139,8 @@ def leave_slack_channel(channel_id: str) -> str:
         SlackApiError: If there is an error during get slack channel
             information.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         response = slack_client.conversations_leave(channel=channel_id)

@@ -155,6 +159,8 @@ def get_slack_channel_information() -> str:
         SlackApiError: If there is an error during get slack channel
             information.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         response = slack_client.conversations_list()

@@ -189,6 +195,8 @@ def get_slack_channel_message(channel_id: str) -> str:
     Raises:
         SlackApiError: If there is an error during get slack channel message.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         result = slack_client.conversations_history(channel=channel_id)

@@ -222,6 +230,8 @@ def send_slack_message(
     Raises:
         SlackApiError: If an error occurs while sending the message.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         if user:

@@ -254,6 +264,8 @@ def delete_slack_message(
     Raises:
         SlackApiError: If an error occurs while sending the message.
     """
+    from slack_sdk.errors import SlackApiError
+
     try:
         slack_client = _login_slack()
         response = slack_client.chat_delete(channel=channel_id, ts=time_stamp)
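Every Slack function now defers the `slack_sdk.errors` import from module import time to call time, so `slack_sdk` only needs to be installed when a Slack function actually runs. A sketch of the pattern (the function name and token are illustrative, not part of the package):

def send_message_sketch(channel_id: str, text: str) -> str:
    # Deferred imports: slack_sdk is resolved only when the function runs.
    from slack_sdk import WebClient
    from slack_sdk.errors import SlackApiError

    client = WebClient(token="xoxb-...")  # placeholder token
    try:
        response = client.chat_postMessage(channel=channel_id, text=text)
        return str(response)
    except SlackApiError as e:
        return f"Slack API error: {e.response['error']}"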
camel/models/__init__.py
CHANGED

@@ -15,6 +15,8 @@ from .anthropic_model import AnthropicModel
 from .base_model import BaseModelBackend
 from .litellm_model import LiteLLMModel
 from .model_factory import ModelFactory
+from .nemotron_model import NemotronModel
+from .ollama_model import OllamaModel
 from .open_source_model import OpenSourceModel
 from .openai_audio_models import OpenAIAudioModels
 from .openai_model import OpenAIModel

@@ -31,4 +33,6 @@ __all__ = [
     'ModelFactory',
     'LiteLLMModel',
     'OpenAIAudioModels',
+    'NemotronModel',
+    'OllamaModel',
 ]
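After this change both new backends are importable at package level:

from camel.models import NemotronModel, OllamaModel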
camel/models/anthropic_model.py
CHANGED

@@ -35,6 +35,7 @@ class AnthropicModel(BaseModelBackend):
         model_type: ModelType,
         model_config_dict: Dict[str, Any],
         api_key: Optional[str] = None,
+        url: Optional[str] = None,
     ) -> None:
         r"""Constructor for Anthropic backend.
 

@@ -45,10 +46,11 @@
                 be fed into Anthropic.messages.create().
             api_key (Optional[str]): The API key for authenticating with the
                 Anthropic service. (default: :obj:`None`)
+            url (Optional[str]): The url to the model service.
         """
-        super().__init__(model_type, model_config_dict)
+        super().__init__(model_type, model_config_dict, api_key, url)
         self._api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")
-        self.client = Anthropic(api_key=self._api_key)
+        self.client = Anthropic(api_key=self._api_key, base_url=url)
         self._token_counter: Optional[BaseTokenCounter] = None
 
     def _convert_response_from_anthropic_to_openai(self, response):
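The new `url` parameter is forwarded to the Anthropic client as `base_url`, which allows pointing the backend at a proxy or alternate endpoint. A sketch (the endpoint, key, and `ModelType` member name are illustrative assumptions):

from camel.models import AnthropicModel
from camel.types import ModelType

model = AnthropicModel(
    model_type=ModelType.CLAUDE_2_1,  # member name assumed
    model_config_dict={},
    api_key="sk-ant-placeholder",
    url="https://my-anthropic-proxy.example.com",  # forwarded as base_url
)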
camel/models/base_model.py
CHANGED

@@ -31,6 +31,7 @@ class BaseModelBackend(ABC):
         model_type: ModelType,
         model_config_dict: Dict[str, Any],
         api_key: Optional[str] = None,
+        url: Optional[str] = None,
     ) -> None:
         r"""Constructor for the model backend.
 

@@ -38,12 +39,14 @@
             model_type (ModelType): Model for which a backend is created.
             model_config_dict (Dict[str, Any]): A config dictionary.
             api_key (Optional[str]): The API key for authenticating with the
-
+                model service.
+            url (Optional[str]): The url to the model service.
         """
         self.model_type = model_type
 
         self.model_config_dict = model_config_dict
         self._api_key = api_key
+        self._url = url
         self.check_model_config()
 
     @property
camel/models/model_factory.py
CHANGED

@@ -11,15 +11,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
-from typing import Any, Dict, Optional
+from typing import Any, Dict, Optional, Union
 
 from camel.models.anthropic_model import AnthropicModel
 from camel.models.base_model import BaseModelBackend
+from camel.models.litellm_model import LiteLLMModel
+from camel.models.ollama_model import OllamaModel
 from camel.models.open_source_model import OpenSourceModel
 from camel.models.openai_model import OpenAIModel
 from camel.models.stub_model import StubModel
 from camel.models.zhipuai_model import ZhipuAIModel
-from camel.types import ModelType
+from camel.types import ModelPlatformType, ModelType
 
 
 class ModelFactory:

@@ -31,18 +33,24 @@ class ModelFactory:
 
     @staticmethod
     def create(
-
+        model_platform: ModelPlatformType,
+        model_type: Union[ModelType, str],
         model_config_dict: Dict,
         api_key: Optional[str] = None,
+        url: Optional[str] = None,
     ) -> BaseModelBackend:
         r"""Creates an instance of `BaseModelBackend` of the specified type.
 
         Args:
-
+            model_platform (ModelPlatformType): Platform from which the model
+                originates.
+            model_type (Union[ModelType, str]): Model for which a backend is
+                created can be a `str` for open source platforms.
             model_config_dict (Dict): A dictionary that will be fed into
                 the backend constructor.
             api_key (Optional[str]): The API key for authenticating with the
-
+                model service.
+            url (Optional[str]): The url to the model service.
 
         Raises:
             ValueError: If there is not backend for the model.

@@ -51,22 +59,35 @@ class ModelFactory:
             BaseModelBackend: The initialized backend.
         """
         model_class: Any
-        if model_type.is_openai:
-            model_class = OpenAIModel
-        elif model_type == ModelType.STUB:
-            model_class = StubModel
-        elif model_type.is_open_source:
-            model_class = OpenSourceModel
-        elif model_type.is_anthropic:
-            model_class = AnthropicModel
-        elif model_type.is_zhipuai:
-            model_class = ZhipuAIModel
-        else:
-            raise ValueError(f"Unknown model type `{model_type}` is input")
 
-        if model_type
-
+        if isinstance(model_type, ModelType):
+            if model_platform.is_open_source and model_type.is_open_source:
+                model_class = OpenSourceModel
+                return model_class(model_type, model_config_dict, url)
+            if model_platform.is_openai and model_type.is_openai:
+                model_class = OpenAIModel
+            elif model_platform.is_anthropic and model_type.is_anthropic:
+                model_class = AnthropicModel
+            elif model_platform.is_zhipuai and model_type.is_zhipuai:
+                model_class = ZhipuAIModel
+            elif model_type == ModelType.STUB:
+                model_class = StubModel
+            else:
+                raise ValueError(
+                    f"Unknown pair of model platform `{model_platform}` "
+                    f"and model type `{model_type}`."
+                )
+        elif isinstance(model_type, str):
+            if model_platform.is_ollama:
+                model_class = OllamaModel
+            elif model_platform.is_litellm:
+                model_class = LiteLLMModel
+            else:
+                raise ValueError(
+                    f"Unknown pair of model platform `{model_platform}` "
+                    f"and model type `{model_type}`."
+                )
         else:
-
+            raise ValueError(f"Invalid model type `{model_type}` provided.")
 
-        return
+        return model_class(model_type, model_config_dict, api_key, url)
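`ModelFactory.create` now dispatches on a (platform, model) pair: enum-typed models must match their platform, while plain string names route to the Ollama and LiteLLM branches. A sketch (the `ModelPlatformType` member names are assumptions based on the `is_openai`/`is_ollama` predicates shown above):

from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

# Enum-typed model matched with its platform:
openai_backend = ModelFactory.create(
    model_platform=ModelPlatformType.OPENAI,  # member name assumed
    model_type=ModelType.GPT_4,
    model_config_dict={},
)

# String-typed model names route to the Ollama/LiteLLM branches:
ollama_backend = ModelFactory.create(
    model_platform=ModelPlatformType.OLLAMA,  # member name assumed
    model_type="llama3",  # free-form name for open platforms
    model_config_dict={"temperature": 0.4},
    url="http://localhost:11434/v1",  # Ollama's OpenAI-compatible endpoint
)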
camel/models/nemotron_model.py
ADDED

@@ -0,0 +1,71 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the “License”);
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an “AS IS” BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+import os
+from typing import List, Optional
+
+from openai import OpenAI
+
+from camel.messages import OpenAIMessage
+from camel.types import ChatCompletion, ModelType
+from camel.utils import (
+    BaseTokenCounter,
+    model_api_key_required,
+)
+
+
+class NemotronModel:
+    r"""Nemotron model API backend with OpenAI compatibility."""
+
+    # NOTE: Nemotron model doesn't support additional model config like OpenAI.
+
+    def __init__(
+        self,
+        model_type: ModelType,
+        api_key: Optional[str] = None,
+    ) -> None:
+        r"""Constructor for Nvidia backend.
+
+        Args:
+            model_type (ModelType): Model for which a backend is created.
+            api_key (Optional[str]): The API key for authenticating with the
+                Nvidia service. (default: :obj:`None`)
+        """
+        self.model_type = model_type
+        url = os.environ.get('NVIDIA_API_BASE_URL', None)
+        self._api_key = api_key or os.environ.get("NVIDIA_API_KEY")
+        if not url or not self._api_key:
+            raise ValueError("The NVIDIA API base url and key should be set.")
+        self._client = OpenAI(
+            timeout=60, max_retries=3, base_url=url, api_key=self._api_key
+        )
+        self._token_counter: Optional[BaseTokenCounter] = None
+
+    @model_api_key_required
+    def run(
+        self,
+        messages: List[OpenAIMessage],
+    ) -> ChatCompletion:
+        r"""Runs inference of OpenAI chat completion.
+
+        Args:
+            messages (List[OpenAIMessage]): Message list.
+
+        Returns:
+            ChatCompletion.
+        """
+        response = self._client.chat.completions.create(
+            messages=messages,
+            model=self.model_type.value,
+        )
+        return response
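A minimal sketch of driving the new backend (both environment variables are required or the constructor raises `ValueError`; the base URL and the `ModelType` member name are assumptions):

import os

from camel.models import NemotronModel
from camel.types import ModelType

os.environ["NVIDIA_API_BASE_URL"] = "https://integrate.api.nvidia.com/v1"  # assumed endpoint
os.environ["NVIDIA_API_KEY"] = "nvapi-..."  # placeholder

model = NemotronModel(model_type=ModelType.NEMOTRON_4_340B_REWARD)  # member name assumed
completion = model.run([{"role": "user", "content": "Hello!"}])
print(completion.choices[0].message)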
camel/models/ollama_model.py
ADDED

@@ -0,0 +1,121 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the “License”);
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an “AS IS” BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+import os
+from typing import Any, Dict, List, Optional, Union
+
+from openai import OpenAI, Stream
+
+from camel.configs import OPENAI_API_PARAMS
+from camel.messages import OpenAIMessage
+from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
+from camel.utils import BaseTokenCounter, OpenAITokenCounter
+
+
+class OllamaModel:
+    r"""Ollama service interface."""
+
+    # NOTE: Current `ModelType and `TokenCounter` desigen is not suitable,
+    # stream mode is not supported
+
+    def __init__(
+        self,
+        model_type: str,
+        model_config_dict: Dict[str, Any],
+        api_key: Optional[str] = None,
+        url: Optional[str] = None,
+    ) -> None:
+        r"""Constructor for Ollama backend with OpenAI compatibility.
+
+        Args:
+            model_type (str): Model for which a backend is created.
+            model_config_dict (Dict[str, Any]): A dictionary that will
+                be fed into openai.ChatCompletion.create().
+            api_key (Optional[str]): The API key for authenticating with the
+                model service. (default: :obj:`None`)
+            url (Optional[str]): The url to the model service.
+        """
+        self.model_type = model_type
+        self.model_config_dict = model_config_dict
+        self._url = url or os.environ.get('OPENAI_API_BASE_URL')
+        self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
+        # Use OpenAI cilent as interface call Ollama
+        # Reference: https://github.com/ollama/ollama/blob/main/docs/openai.md
+        self._client = OpenAI(
+            timeout=60,
+            max_retries=3,
+            base_url=self._url,
+            api_key=self._api_key,
+        )
+        self._token_counter: Optional[BaseTokenCounter] = None
+
+    @property
+    def token_counter(self) -> BaseTokenCounter:
+        r"""Initialize the token counter for the model backend.
+
+        Returns:
+            BaseTokenCounter: The token counter following the model's
+                tokenization style.
+        """
+        # NOTE: Use OpenAITokenCounter temporarily
+        if not self._token_counter:
+            self._token_counter = OpenAITokenCounter(ModelType.GPT_3_5_TURBO)
+        return self._token_counter
+
+    def check_model_config(self):
+        r"""Check whether the model configuration contains any
+        unexpected arguments to OpenAI API.
+
+        Raises:
+            ValueError: If the model configuration dictionary contains any
+                unexpected arguments to OpenAI API.
+        """
+        for param in self.model_config_dict:
+            if param not in OPENAI_API_PARAMS:
+                raise ValueError(
+                    f"Unexpected argument `{param}` is "
+                    "input into OpenAI model backend."
+                )
+
+    def run(
+        self,
+        messages: List[OpenAIMessage],
+    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
+        r"""Runs inference of OpenAI chat completion.
+
+        Args:
+            messages (List[OpenAIMessage]): Message list with the chat history
+                in OpenAI API format.
+
+        Returns:
+            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
+                `ChatCompletion` in the non-stream mode, or
+                `Stream[ChatCompletionChunk]` in the stream mode.
+        """
+
+        response = self._client.chat.completions.create(
+            messages=messages,
+            model=self.model_type,
+            **self.model_config_dict,
+        )
+        return response
+
+    @property
+    def stream(self) -> bool:
+        r"""Returns whether the model is in stream mode, which sends partial
+        results each time.
+
+        Returns:
+            bool: Whether the model is in stream mode.
+        """
+        return self.model_config_dict.get('stream', False)
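Because the backend speaks Ollama's OpenAI-compatible API, direct use looks like any OpenAI call. A sketch (assumes a local `ollama serve` with the model already pulled):

from camel.models import OllamaModel

model = OllamaModel(
    model_type="mistral",  # any model name known to the local Ollama server
    model_config_dict={"temperature": 0.2},
    api_key="ollama",  # Ollama ignores the key, but the OpenAI client expects one
    url="http://localhost:11434/v1",
)
response = model.run([{"role": "user", "content": "Say hello in one sentence."}])
print(response.choices[0].message.content)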
camel/models/open_source_model.py
CHANGED

@@ -31,6 +31,8 @@ class OpenSourceModel(BaseModelBackend):
         self,
         model_type: ModelType,
         model_config_dict: Dict[str, Any],
+        api_key: Optional[str] = None,
+        url: Optional[str] = None,
     ) -> None:
         r"""Constructor for model backends of Open-source models.
 

@@ -38,8 +40,11 @@ class OpenSourceModel(BaseModelBackend):
             model_type (ModelType): Model for which a backend is created.
             model_config_dict (Dict[str, Any]): A dictionary that will
                 be fed into :obj:`openai.ChatCompletion.create()`.
+            api_key (Optional[str]): The API key for authenticating with the
+                model service. (ignored for open-source models)
+            url (Optional[str]): The url to the model service.
         """
-        super().__init__(model_type, model_config_dict)
+        super().__init__(model_type, model_config_dict, api_key, url)
         self._token_counter: Optional[BaseTokenCounter] = None
 
         # Check whether the input model type is open-source

@@ -65,7 +70,7 @@ class OpenSourceModel(BaseModelBackend):
         )
 
         # Load the server URL and check whether it is None
-        server_url: Optional[str] = self.model_config_dict.get(
+        server_url: Optional[str] = url or self.model_config_dict.get(
             "server_url", None
         )
         if not server_url: