camel-ai 0.2.3a1__py3-none-any.whl → 0.2.3a2__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.

Potentially problematic release: this version of camel-ai might be problematic.

Files changed (87)
  1. camel/__init__.py +1 -1
  2. camel/agents/chat_agent.py +93 -69
  3. camel/agents/knowledge_graph_agent.py +4 -6
  4. camel/bots/__init__.py +16 -2
  5. camel/bots/discord_app.py +138 -0
  6. camel/bots/slack/__init__.py +30 -0
  7. camel/bots/slack/models.py +158 -0
  8. camel/bots/slack/slack_app.py +255 -0
  9. camel/configs/__init__.py +1 -2
  10. camel/configs/anthropic_config.py +2 -5
  11. camel/configs/base_config.py +6 -6
  12. camel/configs/groq_config.py +2 -3
  13. camel/configs/ollama_config.py +1 -2
  14. camel/configs/openai_config.py +2 -23
  15. camel/configs/samba_config.py +2 -2
  16. camel/configs/togetherai_config.py +1 -1
  17. camel/configs/vllm_config.py +1 -1
  18. camel/configs/zhipuai_config.py +2 -3
  19. camel/embeddings/openai_embedding.py +2 -2
  20. camel/loaders/__init__.py +2 -0
  21. camel/loaders/chunkr_reader.py +163 -0
  22. camel/loaders/firecrawl_reader.py +3 -3
  23. camel/loaders/unstructured_io.py +35 -33
  24. camel/messages/__init__.py +1 -0
  25. camel/models/__init__.py +2 -4
  26. camel/models/anthropic_model.py +32 -26
  27. camel/models/azure_openai_model.py +39 -36
  28. camel/models/base_model.py +31 -20
  29. camel/models/gemini_model.py +37 -29
  30. camel/models/groq_model.py +29 -23
  31. camel/models/litellm_model.py +44 -61
  32. camel/models/mistral_model.py +32 -29
  33. camel/models/model_factory.py +66 -76
  34. camel/models/nemotron_model.py +33 -23
  35. camel/models/ollama_model.py +42 -47
  36. camel/models/{openai_compatibility_model.py → openai_compatible_model.py} +31 -49
  37. camel/models/openai_model.py +48 -29
  38. camel/models/reka_model.py +30 -28
  39. camel/models/samba_model.py +82 -177
  40. camel/models/stub_model.py +2 -2
  41. camel/models/togetherai_model.py +37 -43
  42. camel/models/vllm_model.py +43 -50
  43. camel/models/zhipuai_model.py +33 -27
  44. camel/retrievers/auto_retriever.py +28 -10
  45. camel/retrievers/vector_retriever.py +58 -47
  46. camel/societies/babyagi_playing.py +6 -3
  47. camel/societies/role_playing.py +5 -3
  48. camel/storages/graph_storages/graph_element.py +3 -5
  49. camel/storages/key_value_storages/json.py +6 -1
  50. camel/toolkits/__init__.py +20 -7
  51. camel/toolkits/arxiv_toolkit.py +155 -0
  52. camel/toolkits/ask_news_toolkit.py +653 -0
  53. camel/toolkits/base.py +2 -3
  54. camel/toolkits/code_execution.py +6 -7
  55. camel/toolkits/dalle_toolkit.py +6 -6
  56. camel/toolkits/{openai_function.py → function_tool.py} +34 -11
  57. camel/toolkits/github_toolkit.py +9 -10
  58. camel/toolkits/google_maps_toolkit.py +7 -7
  59. camel/toolkits/google_scholar_toolkit.py +146 -0
  60. camel/toolkits/linkedin_toolkit.py +7 -7
  61. camel/toolkits/math_toolkit.py +8 -8
  62. camel/toolkits/open_api_toolkit.py +5 -5
  63. camel/toolkits/reddit_toolkit.py +7 -7
  64. camel/toolkits/retrieval_toolkit.py +5 -5
  65. camel/toolkits/search_toolkit.py +9 -9
  66. camel/toolkits/slack_toolkit.py +11 -11
  67. camel/toolkits/twitter_toolkit.py +378 -452
  68. camel/toolkits/weather_toolkit.py +6 -6
  69. camel/toolkits/whatsapp_toolkit.py +177 -0
  70. camel/types/__init__.py +6 -1
  71. camel/types/enums.py +40 -85
  72. camel/types/openai_types.py +3 -0
  73. camel/types/unified_model_type.py +104 -0
  74. camel/utils/__init__.py +0 -2
  75. camel/utils/async_func.py +7 -7
  76. camel/utils/commons.py +32 -3
  77. camel/utils/token_counting.py +30 -212
  78. camel/workforce/role_playing_worker.py +1 -1
  79. camel/workforce/single_agent_worker.py +1 -1
  80. camel/workforce/task_channel.py +4 -3
  81. camel/workforce/workforce.py +4 -4
  82. camel_ai-0.2.3a2.dist-info/LICENSE +201 -0
  83. {camel_ai-0.2.3a1.dist-info → camel_ai-0.2.3a2.dist-info}/METADATA +27 -56
  84. {camel_ai-0.2.3a1.dist-info → camel_ai-0.2.3a2.dist-info}/RECORD +85 -76
  85. {camel_ai-0.2.3a1.dist-info → camel_ai-0.2.3a2.dist-info}/WHEEL +1 -1
  86. camel/bots/discord_bot.py +0 -206
  87. camel/models/open_source_model.py +0 -170
camel/toolkits/weather_toolkit.py CHANGED
@@ -15,7 +15,7 @@ import os
 from typing import List, Literal
 
 from camel.toolkits.base import BaseToolkit
-from camel.toolkits.openai_function import OpenAIFunction
+from camel.toolkits.function_tool import FunctionTool
 
 
 class WeatherToolkit(BaseToolkit):
@@ -157,17 +157,17 @@ class WeatherToolkit(BaseToolkit):
             )
             return error_message
 
-    def get_tools(self) -> List[OpenAIFunction]:
-        r"""Returns a list of OpenAIFunction objects representing the
+    def get_tools(self) -> List[FunctionTool]:
+        r"""Returns a list of FunctionTool objects representing the
         functions in the toolkit.
 
         Returns:
-            List[OpenAIFunction]: A list of OpenAIFunction objects
+            List[FunctionTool]: A list of FunctionTool objects
                 representing the functions in the toolkit.
         """
         return [
-            OpenAIFunction(self.get_weather_data),
+            FunctionTool(self.get_weather_data),
         ]
 
 
-WEATHER_FUNCS: List[OpenAIFunction] = WeatherToolkit().get_tools()
+WEATHER_FUNCS: List[FunctionTool] = WeatherToolkit().get_tools()
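Illustrative usage (not part of the package diff): the OpenAIFunction wrapper is now called FunctionTool and lives in camel/toolkits/function_tool.py, so downstream code migrates by swapping the import and the name. A minimal sketch, assuming the wrapper still derives its OpenAI tool schema from the wrapped callable's signature and docstring, as OpenAIFunction did:

from camel.toolkits.function_tool import FunctionTool  # was camel.toolkits.openai_function


def add(a: int, b: int) -> int:
    r"""Adds two integers.

    Args:
        a (int): First addend.
        b (int): Second addend.

    Returns:
        int: The sum of a and b.
    """
    return a + b


# FunctionTool plays the role OpenAIFunction used to play: wrap a plain
# callable so an agent can call it as a tool.
tool = FunctionTool(add)
print(tool.openai_tool_schema)  # schema attribute, as referenced in camel/utils/async_func.py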
camel/toolkits/whatsapp_toolkit.py ADDED
@@ -0,0 +1,177 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+
+import os
+from typing import Any, Dict, List, Union
+
+import requests
+
+from camel.toolkits import FunctionTool
+from camel.toolkits.base import BaseToolkit
+from camel.utils.commons import retry_request
+
+
+class WhatsAppToolkit(BaseToolkit):
+    r"""A class representing a toolkit for WhatsApp operations.
+
+    This toolkit provides methods to interact with the WhatsApp Business API,
+    allowing users to send messages, retrieve message templates, and get
+    business profile information.
+
+    Attributes:
+        retries (int): Number of retries for API requests in case of failure.
+        delay (int): Delay between retries in seconds.
+        base_url (str): Base URL for the WhatsApp Business API.
+        version (str): API version.
+    """
+
+    def __init__(self, retries: int = 3, delay: int = 1):
+        r"""Initializes the WhatsAppToolkit with the specified number of
+        retries and delay.
+
+        Args:
+            retries (int): Number of times to retry the request in case of
+                failure. (default: :obj:`3`)
+            delay (int): Time in seconds to wait between retries.
+                (default: :obj:`1`)
+        """
+        self.retries = retries
+        self.delay = delay
+        self.base_url = "https://graph.facebook.com"
+        self.version = "v17.0"
+
+        self.access_token = os.environ.get("WHATSAPP_ACCESS_TOKEN", "")
+        self.phone_number_id = os.environ.get("WHATSAPP_PHONE_NUMBER_ID", "")
+
+        if not all([self.access_token, self.phone_number_id]):
+            raise ValueError(
+                "WhatsApp API credentials are not set. "
+                "Please set the WHATSAPP_ACCESS_TOKEN and "
+                "WHATSAPP_PHONE_NUMBER_ID environment variables."
+            )
+
+    def send_message(
+        self, to: str, message: str
+    ) -> Union[Dict[str, Any], str]:
+        r"""Sends a text message to a specified WhatsApp number.
+
+        Args:
+            to (str): The recipient's WhatsApp number in international format.
+            message (str): The text message to send.
+
+        Returns:
+            Union[Dict[str, Any], str]: A dictionary containing
+                the API response if successful, or an error message string if
+                failed.
+        """
+        url = f"{self.base_url}/{self.version}/{self.phone_number_id}/messages"
+        headers = {
+            "Authorization": f"Bearer {self.access_token}",
+            "Content-Type": "application/json",
+        }
+        data = {
+            "messaging_product": "whatsapp",
+            "to": to,
+            "type": "text",
+            "text": {"body": message},
+        }
+
+        try:
+            response = retry_request(
+                requests.post,
+                retries=self.retries,
+                delay=self.delay,
+                url=url,
+                headers=headers,
+                json=data,
+            )
+            response.raise_for_status()
+            return response.json()
+        except Exception as e:
+            return f"Failed to send message: {e!s}"
+
+    def get_message_templates(self) -> Union[List[Dict[str, Any]], str]:
+        r"""Retrieves all message templates for the WhatsApp Business account.
+
+        Returns:
+            Union[List[Dict[str, Any]], str]: A list of dictionaries containing
+                template information if successful, or an error message string
+                if failed.
+        """
+        url = (
+            f"{self.base_url}/{self.version}/{self.phone_number_id}"
+            "/message_templates"
+        )
+        headers = {"Authorization": f"Bearer {self.access_token}"}
+
+        try:
+            response = retry_request(
+                requests.get,
+                retries=self.retries,
+                delay=self.delay,
+                url=url,
+                headers=headers,
+            )
+            response.raise_for_status()
+            return response.json().get("data", [])
+        except Exception as e:
+            return f"Failed to retrieve message templates: {e!s}"
+
+    def get_business_profile(self) -> Union[Dict[str, Any], str]:
+        r"""Retrieves the WhatsApp Business profile information.
+
+        Returns:
+            Union[Dict[str, Any], str]: A dictionary containing the business
+                profile information if successful, or an error message string
+                if failed.
+        """
+        url = (
+            f"{self.base_url}/{self.version}/{self.phone_number_id}"
+            "/whatsapp_business_profile"
+        )
+        headers = {"Authorization": f"Bearer {self.access_token}"}
+        params = {
+            "fields": (
+                "about,address,description,email,profile_picture_url,"
+                "websites,vertical"
+            )
+        }
+
+        try:
+            response = retry_request(
+                requests.get,
+                retries=self.retries,
+                delay=self.delay,
+                url=url,
+                headers=headers,
+                params=params,
+            )
+            response.raise_for_status()
+            return response.json()
+        except Exception as e:
+            return f"Failed to retrieve business profile: {e!s}"
+
+    def get_tools(self) -> List[FunctionTool]:
+        r"""Returns a list of FunctionTool objects representing the
+        functions in the toolkit.
+
+        Returns:
+            List[FunctionTool]: A list of FunctionTool objects for the
+                toolkit methods.
+        """
+        return [
+            FunctionTool(self.send_message),
+            FunctionTool(self.get_message_templates),
+            FunctionTool(self.get_business_profile),
+        ]
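Illustrative usage (not part of the package diff): a short sketch of the new toolkit. The environment variable names and the return convention (parsed JSON on success, error string on failure) are taken from the code above; the import path follows the file list, everything else is assumed.

import os

from camel.toolkits.whatsapp_toolkit import WhatsAppToolkit

# The toolkit reads both variables in __init__ and raises ValueError if
# either is missing.
os.environ["WHATSAPP_ACCESS_TOKEN"] = "<access-token>"
os.environ["WHATSAPP_PHONE_NUMBER_ID"] = "<phone-number-id>"

toolkit = WhatsAppToolkit(retries=3, delay=1)

result = toolkit.send_message(to="15551234567", message="Hello from CAMEL")
if isinstance(result, dict):
    print("Message accepted:", result)
else:
    print(result)  # e.g. "Failed to send message: ..."

# The three methods are also exposed as agent tools:
tools = toolkit.get_tools()  # send_message, get_message_templates, get_business_profile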
camel/types/__init__.py CHANGED
@@ -28,6 +28,7 @@ from .enums import (
     VoiceType,
 )
 from .openai_types import (
+    NOT_GIVEN,
     ChatCompletion,
     ChatCompletionAssistantMessageParam,
     ChatCompletionChunk,
@@ -38,7 +39,9 @@ from .openai_types import (
     ChatCompletionUserMessageParam,
     Choice,
     CompletionUsage,
+    NotGiven,
 )
+from .unified_model_type import UnifiedModelType
 
 __all__ = [
     'RoleType',
@@ -59,11 +62,13 @@ __all__ = [
     'ChatCompletionAssistantMessageParam',
     'ChatCompletionFunctionMessageParam',
     'CompletionUsage',
-    'OpenAIVideoType',
     'OpenAIImageType',
     'OpenAIVisionDetailType',
     'OpenAPIName',
     'ModelPlatformType',
     'AudioModelType',
     'VoiceType',
+    'UnifiedModelType',
+    'NOT_GIVEN',
+    'NotGiven',
 ]
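Illustrative usage (not part of the package diff): the new re-exports make the openai-python NOT_GIVEN sentinel and the UnifiedModelType wrapper importable directly from camel.types, matching the __all__ entries above.

from typing import Union

from camel.types import NOT_GIVEN, NotGiven, UnifiedModelType


# NOT_GIVEN is openai-python's "argument was not supplied" sentinel, distinct
# from an explicit None; NotGiven is its class, usable in annotations.
def describe(max_tokens: Union[int, NotGiven] = NOT_GIVEN) -> str:
    return "unset" if isinstance(max_tokens, NotGiven) else str(max_tokens)


print(describe())                       # unset
print(describe(256))                    # 256
print(UnifiedModelType("gpt-4o-mini"))  # behaves like the plain string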
camel/types/enums.py CHANGED
@@ -11,8 +11,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
-import re
 from enum import Enum, EnumMeta
+from typing import cast
+
+from camel.types.unified_model_type import UnifiedModelType
 
 
 class RoleType(Enum):
@@ -23,7 +25,9 @@ class RoleType(Enum):
     DEFAULT = "default"
 
 
-class ModelType(Enum):
+class ModelType(UnifiedModelType, Enum):
+    DEFAULT = "gpt-4o-mini"
+
     GPT_3_5_TURBO = "gpt-3.5-turbo"
     GPT_4 = "gpt-4"
     GPT_4_TURBO = "gpt-4-turbo"
@@ -33,7 +37,6 @@ class ModelType(Enum):
     O1_MINI = "o1-mini"
 
     GLM_4 = "glm-4"
-    GLM_4_OPEN_SOURCE = "glm-4-open-source"
     GLM_4V = 'glm-4v'
     GLM_3_TURBO = "glm-3-turbo"
 
@@ -48,13 +51,6 @@
 
     STUB = "stub"
 
-    LLAMA_2 = "llama-2"
-    LLAMA_3 = "llama-3"
-    VICUNA = "vicuna"
-    VICUNA_16K = "vicuna-16k"
-
-    QWEN_2 = "qwen-2"
-
     # Legacy anthropic models
     # NOTE: anthropic legacy models only Claude 2.1 has system prompt support
     CLAUDE_2_1 = "claude-2.1"
@@ -75,13 +71,15 @@ class ModelType(Enum):
     GEMINI_1_5_PRO = "gemini-1.5-pro"
 
     # Mistral AI models
-    MISTRAL_LARGE = "mistral-large-latest"
-    MISTRAL_NEMO = "open-mistral-nemo"
-    MISTRAL_CODESTRAL = "codestral-latest"
+    MISTRAL_3B = "ministral-3b-latest"
     MISTRAL_7B = "open-mistral-7b"
+    MISTRAL_8B = "ministral-8b-latest"
+    MISTRAL_CODESTRAL = "codestral-latest"
+    MISTRAL_CODESTRAL_MAMBA = "open-codestral-mamba"
+    MISTRAL_LARGE = "mistral-large-latest"
     MISTRAL_MIXTRAL_8x7B = "open-mixtral-8x7b"
     MISTRAL_MIXTRAL_8x22B = "open-mixtral-8x22b"
-    MISTRAL_CODESTRAL_MAMBA = "open-codestral-mamba"
+    MISTRAL_NEMO = "open-mistral-nemo"
     MISTRAL_PIXTRAL_12B = "pixtral-12b-2409"
 
     # Reka models
@@ -89,6 +87,12 @@ class ModelType(Enum):
     REKA_FLASH = "reka-flash"
     REKA_EDGE = "reka-edge"
 
+    def __str__(self):
+        return self.value
+
+    def __new__(cls, value) -> "ModelType":
+        return cast("ModelType", UnifiedModelType.__new__(cls, value))
+
     @property
     def value_for_tiktoken(self) -> str:
         if self.is_openai:
@@ -96,7 +100,7 @@
         return "gpt-4o-mini"
 
     @property
-    def supports_tool_calling(self) -> bool:
+    def support_native_tool_calling(self) -> bool:
         return any([self.is_openai, self.is_gemini, self.is_mistral])
 
     @property
@@ -133,18 +137,6 @@ class ModelType(Enum):
             ModelType.GLM_4V,
         }
 
-    @property
-    def is_open_source(self) -> bool:
-        r"""Returns whether this type of models is open-source."""
-        return self in {
-            ModelType.LLAMA_2,
-            ModelType.LLAMA_3,
-            ModelType.QWEN_2,
-            ModelType.GLM_4_OPEN_SOURCE,
-            ModelType.VICUNA,
-            ModelType.VICUNA_16K,
-        }
-
     @property
     def is_anthropic(self) -> bool:
         r"""Returns whether this type of models is Anthropic-released model.
@@ -188,6 +180,8 @@ class ModelType(Enum):
             ModelType.MISTRAL_MIXTRAL_8x22B,
             ModelType.MISTRAL_CODESTRAL_MAMBA,
             ModelType.MISTRAL_PIXTRAL_12B,
+            ModelType.MISTRAL_8B,
+            ModelType.MISTRAL_3B,
         }
 
     @property
@@ -208,7 +202,10 @@
         Returns:
             bool: Whether this type of models is gemini.
         """
-        return self in {ModelType.GEMINI_1_5_FLASH, ModelType.GEMINI_1_5_PRO}
+        return self in {
+            ModelType.GEMINI_1_5_FLASH,
+            ModelType.GEMINI_1_5_PRO,
+        }
 
     @property
     def is_reka(self) -> bool:
@@ -232,11 +229,7 @@
         """
         if self is ModelType.GLM_4V:
             return 1024
-        elif self is ModelType.VICUNA:
-            # reference: https://lmsys.org/blog/2023-03-30-vicuna/
-            return 2048
         elif self in {
-            ModelType.LLAMA_2,
             ModelType.NEMOTRON_4_REWARD,
             ModelType.STUB,
             ModelType.REKA_CORE,
@@ -250,15 +243,12 @@
             ModelType.GROQ_LLAMA_3_70B,
             ModelType.GROQ_GEMMA_7B_IT,
             ModelType.GROQ_GEMMA_2_9B_IT,
-            ModelType.LLAMA_3,
             ModelType.GLM_3_TURBO,
             ModelType.GLM_4,
-            ModelType.GLM_4_OPEN_SOURCE,
         }:
             return 8_192
         elif self in {
             ModelType.GPT_3_5_TURBO,
-            ModelType.VICUNA_16K,
         }:
             return 16_384
         elif self in {
@@ -270,7 +260,10 @@
             return 32_768
         elif self in {ModelType.MISTRAL_MIXTRAL_8x22B}:
             return 64_000
-        elif self in {ModelType.CLAUDE_2_0, ModelType.CLAUDE_INSTANT_1_2}:
+        elif self in {
+            ModelType.CLAUDE_2_0,
+            ModelType.CLAUDE_INSTANT_1_2,
+        }:
             return 100_000
         elif self in {
             ModelType.GPT_4O,
@@ -281,7 +274,8 @@
             ModelType.MISTRAL_LARGE,
             ModelType.MISTRAL_NEMO,
             ModelType.MISTRAL_PIXTRAL_12B,
-            ModelType.QWEN_2,
+            ModelType.MISTRAL_8B,
+            ModelType.MISTRAL_3B,
         }:
             return 128_000
         elif self in {
@@ -302,48 +296,14 @@
             ModelType.MISTRAL_CODESTRAL_MAMBA,
         }:
             return 256_000
-        elif self in {ModelType.GEMINI_1_5_FLASH, ModelType.GEMINI_1_5_PRO}:
+        elif self in {
+            ModelType.GEMINI_1_5_FLASH,
+            ModelType.GEMINI_1_5_PRO,
+        }:
             return 1_048_576
         else:
             raise ValueError("Unknown model type")
 
-    def validate_model_name(self, model_name: str) -> bool:
-        r"""Checks whether the model type and the model name matches.
-
-        Args:
-            model_name (str): The name of the model, e.g. "vicuna-7b-v1.5".
-
-        Returns:
-            bool: Whether the model type matches the model name.
-        """
-        if self is ModelType.VICUNA:
-            pattern = r'^vicuna-\d+b-v\d+\.\d+$'
-            return bool(re.match(pattern, model_name))
-        elif self is ModelType.VICUNA_16K:
-            pattern = r'^vicuna-\d+b-v\d+\.\d+-16k$'
-            return bool(re.match(pattern, model_name))
-        elif self is ModelType.LLAMA_2:
-            return (
-                self.value in model_name.lower()
-                or "llama2" in model_name.lower()
-            )
-        elif self is ModelType.LLAMA_3:
-            return (
-                self.value in model_name.lower()
-                or "llama3" in model_name.lower()
-            )
-        elif self is ModelType.QWEN_2:
-            return (
-                self.value in model_name.lower()
-                or "qwen2" in model_name.lower()
-            )
-        elif self is ModelType.GLM_4_OPEN_SOURCE:
-            return (
-                'glm-4' in model_name.lower() or "glm4" in model_name.lower()
-            )
-        else:
-            return self.value in model_name.lower()
-
 
 class EmbeddingModelType(Enum):
     TEXT_EMBEDDING_ADA_2 = "text-embedding-ada-002"
@@ -469,21 +429,21 @@ class OpenAPIName(Enum):
 
 
 class ModelPlatformType(Enum):
+    DEFAULT = "openai"
+
     OPENAI = "openai"
     AZURE = "azure"
     ANTHROPIC = "anthropic"
     GROQ = "groq"
-    OPEN_SOURCE = "open-source"
     OLLAMA = "ollama"
     LITELLM = "litellm"
     ZHIPU = "zhipuai"
-    DEFAULT = "default"
     GEMINI = "gemini"
     VLLM = "vllm"
     MISTRAL = "mistral"
     REKA = "reka"
     TOGETHER = "together"
-    OPENAI_COMPATIBILITY_MODEL = "openai-compatibility-model"
+    OPENAI_COMPATIBLE_MODEL = "openai-compatible-model"
     SAMBA = "samba-nova"
 
     @property
@@ -537,15 +497,10 @@ class ModelPlatformType(Enum):
         return self is ModelPlatformType.MISTRAL
 
     @property
-    def is_open_source(self) -> bool:
-        r"""Returns whether this platform is opensource."""
-        return self is ModelPlatformType.OPEN_SOURCE
-
-    @property
-    def is_openai_compatibility_model(self) -> bool:
+    def is_openai_compatible_model(self) -> bool:
         r"""Returns whether this is a platform supporting openai
         compatibility"""
-        return self is ModelPlatformType.OPENAI_COMPATIBILITY_MODEL
+        return self is ModelPlatformType.OPENAI_COMPATIBLE_MODEL
 
     @property
     def is_gemini(self) -> bool:
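Illustrative usage (not part of the package diff): the open-source placeholder members (LLAMA_2, VICUNA, QWEN_2, GLM_4_OPEN_SOURCE, ...) and their helpers are gone, both enums gain a DEFAULT member, and ModelType now mixes in UnifiedModelType so its members behave as strings. A small sketch based only on the members and properties shown above:

from camel.types import ModelPlatformType, ModelType

# New DEFAULT members introduced in 0.2.3a2
assert ModelType.DEFAULT.value == "gpt-4o-mini"
assert ModelPlatformType.DEFAULT.value == "openai"

# ModelType members now stringify to their value via UnifiedModelType
assert str(ModelType.GPT_4O) == "gpt-4o"

# Renamed property (was `supports_tool_calling`)
assert ModelType.GPT_4O.support_native_tool_calling

# Renamed platform member and property
assert ModelPlatformType.OPENAI_COMPATIBLE_MODEL.is_openai_compatible_model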
camel/types/openai_types.py CHANGED
@@ -31,6 +31,7 @@ from openai.types.chat.chat_completion_user_message_param import (
     ChatCompletionUserMessageParam,
 )
 from openai.types.completion_usage import CompletionUsage
+from openai._types import NOT_GIVEN, NotGiven
 
 Choice = Choice
 ChatCompletion = ChatCompletion
@@ -42,3 +43,5 @@ ChatCompletionUserMessageParam = ChatCompletionUserMessageParam
 ChatCompletionAssistantMessageParam = ChatCompletionAssistantMessageParam
 ChatCompletionFunctionMessageParam = ChatCompletionFunctionMessageParam
 CompletionUsage = CompletionUsage
+NOT_GIVEN = NOT_GIVEN
+NotGiven = NotGiven
camel/types/unified_model_type.py ADDED
@@ -0,0 +1,104 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+import logging
+from threading import Lock
+from typing import TYPE_CHECKING, ClassVar, Dict, Union, cast
+
+if TYPE_CHECKING:
+    from camel.types import ModelType
+
+
+class UnifiedModelType(str):
+    r"""Class used for support both :obj:`ModelType` and :obj:`str` to be used
+    to represent a model type in a unified way. This class is a subclass of
+    :obj:`str` so that it can be used as string seamlessly.
+
+    Args:
+        value (Union[ModelType, str]): The value of the model type.
+    """
+
+    _cache: ClassVar[Dict[str, "UnifiedModelType"]] = {}
+    _lock: ClassVar[Lock] = Lock()
+
+    def __new__(cls, value: Union["ModelType", str]) -> "UnifiedModelType":
+        with cls._lock:
+            if value not in cls._cache:
+                instance = super().__new__(cls, value)
+                cls._cache[value] = cast(UnifiedModelType, instance)
+            else:
+                instance = cls._cache[value]
+        return instance
+
+    def __init__(self, value: Union["ModelType", str]) -> None:
+        pass
+
+    @property
+    def value_for_tiktoken(self) -> str:
+        r"""Returns the model name for TikToken."""
+        return "gpt-4o-mini"
+
+    @property
+    def token_limit(self) -> int:
+        r"""Returns the token limit for the model. Here we set the default
+        value as `999_999_999` if it's not provided from `model_config_dict`"""
+        logging.warning(
+            "Invalid or missing `max_tokens` in `model_config_dict`. "
+            "Defaulting to 999_999_999 tokens."
+        )
+        return 999_999_999
+
+    @property
+    def is_openai(self) -> bool:
+        r"""Returns whether the model is an OpenAI model."""
+        return True
+
+    @property
+    def is_anthropic(self) -> bool:
+        r"""Returns whether the model is an Anthropic model."""
+        return True
+
+    @property
+    def is_azure_openai(self) -> bool:
+        r"""Returns whether the model is an Azure OpenAI model."""
+        return True
+
+    @property
+    def is_groq(self) -> bool:
+        r"""Returns whether the model is a Groq served model."""
+        return True
+
+    @property
+    def is_zhipuai(self) -> bool:
+        r"""Returns whether the model is a Zhipuai model."""
+        return True
+
+    @property
+    def is_gemini(self) -> bool:
+        r"""Returns whether the model is a Gemini model."""
+        return True
+
+    @property
+    def is_mistral(self) -> bool:
+        r"""Returns whether the model is a Mistral model."""
+        return True
+
+    @property
+    def is_reka(self) -> bool:
+        r"""Returns whether the model is a Reka model."""
+        return True
+
+    @property
+    def support_native_tool_calling(self) -> bool:
+        r"""Returns whether the model supports native tool calling."""
+        return False
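Illustrative usage (not part of the package diff): because UnifiedModelType subclasses str and caches one instance per value, a raw model-name string and a ModelType member become interchangeable wherever a model type is expected. A minimal sketch based only on the class above:

from camel.types.unified_model_type import UnifiedModelType

a = UnifiedModelType("my-hosted-model")  # any string works, e.g. a self-hosted model name
b = UnifiedModelType("my-hosted-model")

assert a == "my-hosted-model"                 # it is a str, so plain comparisons work
assert a is b                                 # instances are interned via _cache
assert a.value_for_tiktoken == "gpt-4o-mini"  # tokenizer fallback for unknown models
assert a.token_limit == 999_999_999           # permissive default (logs a warning)
assert not a.support_native_tool_calling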
camel/utils/__init__.py CHANGED
@@ -43,7 +43,6 @@ from .token_counting import (
     LiteLLMTokenCounter,
     MistralTokenCounter,
     OpenAITokenCounter,
-    OpenSourceTokenCounter,
     get_model_encoding,
 )
 
@@ -60,7 +59,6 @@ __all__ = [
     'get_model_encoding',
     'BaseTokenCounter',
     'OpenAITokenCounter',
-    'OpenSourceTokenCounter',
     'LiteLLMTokenCounter',
     'Constants',
     'text_extract_from_web',
camel/utils/async_func.py CHANGED
@@ -14,20 +14,20 @@
 import asyncio
 from copy import deepcopy
 
-from camel.toolkits import OpenAIFunction
+from camel.toolkits import FunctionTool
 
 
-def sync_funcs_to_async(funcs: list[OpenAIFunction]) -> list[OpenAIFunction]:
+def sync_funcs_to_async(funcs: list[FunctionTool]) -> list[FunctionTool]:
     r"""Convert a list of Python synchronous functions to Python
     asynchronous functions.
 
     Args:
-        funcs (list[OpenAIFunction]): List of Python synchronous
-            functions in the :obj:`OpenAIFunction` format.
+        funcs (list[FunctionTool]): List of Python synchronous
+            functions in the :obj:`FunctionTool` format.
 
     Returns:
-        list[OpenAIFunction]: List of Python asynchronous functions
-            in the :obj:`OpenAIFunction` format.
+        list[FunctionTool]: List of Python asynchronous functions
+            in the :obj:`FunctionTool` format.
     """
     async_funcs = []
     for func in funcs:
@@ -37,6 +37,6 @@ def sync_funcs_to_async(funcs: list[OpenAIFunction]) -> list[OpenAIFunction]:
             return asyncio.to_thread(sync_func, *args, **kwargs)  # noqa: B023
 
         async_funcs.append(
-            OpenAIFunction(async_callable, deepcopy(func.openai_tool_schema))
+            FunctionTool(async_callable, deepcopy(func.openai_tool_schema))
         )
     return async_funcs
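Illustrative usage (not part of the package diff): sync_funcs_to_async now consumes and returns FunctionTool objects. The sketch below assumes the wrapped callable is exposed as the tool's `func` attribute, which is not visible in this hunk.

import asyncio

from camel.toolkits import FunctionTool
from camel.utils.async_func import sync_funcs_to_async


def greet(name: str) -> str:
    r"""Returns a greeting.

    Args:
        name (str): Name of the person to greet.

    Returns:
        str: The greeting text.
    """
    return f"Hello, {name}!"


# Convert a synchronous tool into one whose callable runs in a worker thread;
# the original openai_tool_schema is carried over via deepcopy.
[async_tool] = sync_funcs_to_async([FunctionTool(greet)])


async def main() -> None:
    print(await async_tool.func("CAMEL"))  # `func` attribute assumed to hold the callable


asyncio.run(main())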