camel-ai 0.2.0__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic. Click here for more details.

Files changed (102) hide show
  1. camel/__init__.py +1 -1
  2. camel/agents/chat_agent.py +326 -115
  3. camel/agents/knowledge_graph_agent.py +4 -6
  4. camel/bots/__init__.py +34 -0
  5. camel/bots/discord_app.py +138 -0
  6. camel/bots/slack/__init__.py +30 -0
  7. camel/bots/slack/models.py +158 -0
  8. camel/bots/slack/slack_app.py +255 -0
  9. camel/bots/telegram_bot.py +82 -0
  10. camel/configs/__init__.py +1 -2
  11. camel/configs/anthropic_config.py +2 -5
  12. camel/configs/base_config.py +6 -6
  13. camel/configs/gemini_config.py +1 -1
  14. camel/configs/groq_config.py +2 -3
  15. camel/configs/ollama_config.py +1 -2
  16. camel/configs/openai_config.py +2 -23
  17. camel/configs/samba_config.py +2 -2
  18. camel/configs/togetherai_config.py +1 -1
  19. camel/configs/vllm_config.py +1 -1
  20. camel/configs/zhipuai_config.py +2 -3
  21. camel/embeddings/openai_embedding.py +2 -2
  22. camel/loaders/__init__.py +2 -0
  23. camel/loaders/chunkr_reader.py +163 -0
  24. camel/loaders/firecrawl_reader.py +13 -45
  25. camel/loaders/unstructured_io.py +65 -29
  26. camel/messages/__init__.py +1 -0
  27. camel/messages/func_message.py +2 -2
  28. camel/models/__init__.py +2 -4
  29. camel/models/anthropic_model.py +32 -26
  30. camel/models/azure_openai_model.py +39 -36
  31. camel/models/base_model.py +31 -20
  32. camel/models/gemini_model.py +37 -29
  33. camel/models/groq_model.py +29 -23
  34. camel/models/litellm_model.py +44 -61
  35. camel/models/mistral_model.py +33 -30
  36. camel/models/model_factory.py +66 -76
  37. camel/models/nemotron_model.py +33 -23
  38. camel/models/ollama_model.py +42 -47
  39. camel/models/{openai_compatibility_model.py → openai_compatible_model.py} +36 -41
  40. camel/models/openai_model.py +48 -29
  41. camel/models/reka_model.py +30 -28
  42. camel/models/samba_model.py +82 -177
  43. camel/models/stub_model.py +2 -2
  44. camel/models/togetherai_model.py +37 -43
  45. camel/models/vllm_model.py +43 -50
  46. camel/models/zhipuai_model.py +33 -27
  47. camel/retrievers/auto_retriever.py +28 -10
  48. camel/retrievers/vector_retriever.py +72 -44
  49. camel/societies/babyagi_playing.py +6 -3
  50. camel/societies/role_playing.py +17 -3
  51. camel/storages/__init__.py +2 -0
  52. camel/storages/graph_storages/__init__.py +2 -0
  53. camel/storages/graph_storages/graph_element.py +3 -5
  54. camel/storages/graph_storages/nebula_graph.py +547 -0
  55. camel/storages/key_value_storages/json.py +6 -1
  56. camel/tasks/task.py +11 -4
  57. camel/tasks/task_prompt.py +4 -0
  58. camel/toolkits/__init__.py +20 -7
  59. camel/toolkits/arxiv_toolkit.py +155 -0
  60. camel/toolkits/ask_news_toolkit.py +653 -0
  61. camel/toolkits/base.py +2 -3
  62. camel/toolkits/code_execution.py +6 -7
  63. camel/toolkits/dalle_toolkit.py +6 -6
  64. camel/toolkits/{openai_function.py → function_tool.py} +34 -11
  65. camel/toolkits/github_toolkit.py +9 -10
  66. camel/toolkits/google_maps_toolkit.py +7 -7
  67. camel/toolkits/google_scholar_toolkit.py +146 -0
  68. camel/toolkits/linkedin_toolkit.py +7 -7
  69. camel/toolkits/math_toolkit.py +8 -8
  70. camel/toolkits/open_api_toolkit.py +5 -5
  71. camel/toolkits/reddit_toolkit.py +7 -7
  72. camel/toolkits/retrieval_toolkit.py +5 -5
  73. camel/toolkits/search_toolkit.py +9 -9
  74. camel/toolkits/slack_toolkit.py +11 -11
  75. camel/toolkits/twitter_toolkit.py +378 -452
  76. camel/toolkits/weather_toolkit.py +6 -6
  77. camel/toolkits/whatsapp_toolkit.py +177 -0
  78. camel/types/__init__.py +6 -1
  79. camel/types/enums.py +43 -85
  80. camel/types/openai_types.py +3 -0
  81. camel/types/unified_model_type.py +104 -0
  82. camel/utils/__init__.py +0 -2
  83. camel/utils/async_func.py +7 -7
  84. camel/utils/commons.py +40 -4
  85. camel/utils/token_counting.py +30 -212
  86. camel/workforce/__init__.py +6 -6
  87. camel/workforce/base.py +9 -5
  88. camel/workforce/prompts.py +179 -0
  89. camel/workforce/role_playing_worker.py +181 -0
  90. camel/workforce/{single_agent_node.py → single_agent_worker.py} +49 -23
  91. camel/workforce/task_channel.py +7 -8
  92. camel/workforce/utils.py +20 -50
  93. camel/workforce/{worker_node.py → worker.py} +15 -12
  94. camel/workforce/workforce.py +456 -19
  95. camel_ai-0.2.3.dist-info/LICENSE +201 -0
  96. {camel_ai-0.2.0.dist-info → camel_ai-0.2.3.dist-info}/METADATA +39 -65
  97. {camel_ai-0.2.0.dist-info → camel_ai-0.2.3.dist-info}/RECORD +98 -86
  98. {camel_ai-0.2.0.dist-info → camel_ai-0.2.3.dist-info}/WHEEL +1 -1
  99. camel/models/open_source_model.py +0 -170
  100. camel/workforce/manager_node.py +0 -299
  101. camel/workforce/role_playing_node.py +0 -168
  102. camel/workforce/workforce_prompt.py +0 -125
@@ -15,7 +15,7 @@ import os
15
15
  from typing import List, Literal
16
16
 
17
17
  from camel.toolkits.base import BaseToolkit
18
- from camel.toolkits.openai_function import OpenAIFunction
18
+ from camel.toolkits.function_tool import FunctionTool
19
19
 
20
20
 
21
21
  class WeatherToolkit(BaseToolkit):
@@ -157,17 +157,17 @@ class WeatherToolkit(BaseToolkit):
157
157
  )
158
158
  return error_message
159
159
 
160
- def get_tools(self) -> List[OpenAIFunction]:
161
- r"""Returns a list of OpenAIFunction objects representing the
160
+ def get_tools(self) -> List[FunctionTool]:
161
+ r"""Returns a list of FunctionTool objects representing the
162
162
  functions in the toolkit.
163
163
 
164
164
  Returns:
165
- List[OpenAIFunction]: A list of OpenAIFunction objects
165
+ List[FunctionTool]: A list of FunctionTool objects
166
166
  representing the functions in the toolkit.
167
167
  """
168
168
  return [
169
- OpenAIFunction(self.get_weather_data),
169
+ FunctionTool(self.get_weather_data),
170
170
  ]
171
171
 
172
172
 
173
- WEATHER_FUNCS: List[OpenAIFunction] = WeatherToolkit().get_tools()
173
+ WEATHER_FUNCS: List[FunctionTool] = WeatherToolkit().get_tools()
@@ -0,0 +1,177 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+
15
+ import os
16
+ from typing import Any, Dict, List, Union
17
+
18
+ import requests
19
+
20
+ from camel.toolkits import FunctionTool
21
+ from camel.toolkits.base import BaseToolkit
22
+ from camel.utils.commons import retry_request
23
+
24
+
25
+ class WhatsAppToolkit(BaseToolkit):
26
+ r"""A class representing a toolkit for WhatsApp operations.
27
+
28
+ This toolkit provides methods to interact with the WhatsApp Business API,
29
+ allowing users to send messages, retrieve message templates, and get
30
+ business profile information.
31
+
32
+ Attributes:
33
+ retries (int): Number of retries for API requests in case of failure.
34
+ delay (int): Delay between retries in seconds.
35
+ base_url (str): Base URL for the WhatsApp Business API.
36
+ version (str): API version.
37
+ """
38
+
39
+ def __init__(self, retries: int = 3, delay: int = 1):
40
+ r"""Initializes the WhatsAppToolkit with the specified number of
41
+ retries and delay.
42
+
43
+ Args:
44
+ retries (int): Number of times to retry the request in case of
45
+ failure. (default: :obj:`3`)
46
+ delay (int): Time in seconds to wait between retries.
47
+ (default: :obj:`1`)
48
+ """
49
+ self.retries = retries
50
+ self.delay = delay
51
+ self.base_url = "https://graph.facebook.com"
52
+ self.version = "v17.0"
53
+
54
+ self.access_token = os.environ.get("WHATSAPP_ACCESS_TOKEN", "")
55
+ self.phone_number_id = os.environ.get("WHATSAPP_PHONE_NUMBER_ID", "")
56
+
57
+ if not all([self.access_token, self.phone_number_id]):
58
+ raise ValueError(
59
+ "WhatsApp API credentials are not set. "
60
+ "Please set the WHATSAPP_ACCESS_TOKEN and "
61
+ "WHATSAPP_PHONE_NUMBER_ID environment variables."
62
+ )
63
+
64
+ def send_message(
65
+ self, to: str, message: str
66
+ ) -> Union[Dict[str, Any], str]:
67
+ r"""Sends a text message to a specified WhatsApp number.
68
+
69
+ Args:
70
+ to (str): The recipient's WhatsApp number in international format.
71
+ message (str): The text message to send.
72
+
73
+ Returns:
74
+ Union[Dict[str, Any], str]: A dictionary containing
75
+ the API response if successful, or an error message string if
76
+ failed.
77
+ """
78
+ url = f"{self.base_url}/{self.version}/{self.phone_number_id}/messages"
79
+ headers = {
80
+ "Authorization": f"Bearer {self.access_token}",
81
+ "Content-Type": "application/json",
82
+ }
83
+ data = {
84
+ "messaging_product": "whatsapp",
85
+ "to": to,
86
+ "type": "text",
87
+ "text": {"body": message},
88
+ }
89
+
90
+ try:
91
+ response = retry_request(
92
+ requests.post,
93
+ retries=self.retries,
94
+ delay=self.delay,
95
+ url=url,
96
+ headers=headers,
97
+ json=data,
98
+ )
99
+ response.raise_for_status()
100
+ return response.json()
101
+ except Exception as e:
102
+ return f"Failed to send message: {e!s}"
103
+
104
+ def get_message_templates(self) -> Union[List[Dict[str, Any]], str]:
105
+ r"""Retrieves all message templates for the WhatsApp Business account.
106
+
107
+ Returns:
108
+ Union[List[Dict[str, Any]], str]: A list of dictionaries containing
109
+ template information if successful, or an error message string
110
+ if failed.
111
+ """
112
+ url = (
113
+ f"{self.base_url}/{self.version}/{self.phone_number_id}"
114
+ "/message_templates"
115
+ )
116
+ headers = {"Authorization": f"Bearer {self.access_token}"}
117
+
118
+ try:
119
+ response = retry_request(
120
+ requests.get,
121
+ retries=self.retries,
122
+ delay=self.delay,
123
+ url=url,
124
+ headers=headers,
125
+ )
126
+ response.raise_for_status()
127
+ return response.json().get("data", [])
128
+ except Exception as e:
129
+ return f"Failed to retrieve message templates: {e!s}"
130
+
131
+ def get_business_profile(self) -> Union[Dict[str, Any], str]:
132
+ r"""Retrieves the WhatsApp Business profile information.
133
+
134
+ Returns:
135
+ Union[Dict[str, Any], str]: A dictionary containing the business
136
+ profile information if successful, or an error message string
137
+ if failed.
138
+ """
139
+ url = (
140
+ f"{self.base_url}/{self.version}/{self.phone_number_id}"
141
+ "/whatsapp_business_profile"
142
+ )
143
+ headers = {"Authorization": f"Bearer {self.access_token}"}
144
+ params = {
145
+ "fields": (
146
+ "about,address,description,email,profile_picture_url,"
147
+ "websites,vertical"
148
+ )
149
+ }
150
+
151
+ try:
152
+ response = retry_request(
153
+ requests.get,
154
+ retries=self.retries,
155
+ delay=self.delay,
156
+ url=url,
157
+ headers=headers,
158
+ params=params,
159
+ )
160
+ response.raise_for_status()
161
+ return response.json()
162
+ except Exception as e:
163
+ return f"Failed to retrieve business profile: {e!s}"
164
+
165
+ def get_tools(self) -> List[FunctionTool]:
166
+ r"""Returns a list of FunctionTool objects representing the
167
+ functions in the toolkit.
168
+
169
+ Returns:
170
+ List[FunctionTool]: A list of FunctionTool objects for the
171
+ toolkit methods.
172
+ """
173
+ return [
174
+ FunctionTool(self.send_message),
175
+ FunctionTool(self.get_message_templates),
176
+ FunctionTool(self.get_business_profile),
177
+ ]
camel/types/__init__.py CHANGED
@@ -28,6 +28,7 @@ from .enums import (
28
28
  VoiceType,
29
29
  )
30
30
  from .openai_types import (
31
+ NOT_GIVEN,
31
32
  ChatCompletion,
32
33
  ChatCompletionAssistantMessageParam,
33
34
  ChatCompletionChunk,
@@ -38,7 +39,9 @@ from .openai_types import (
38
39
  ChatCompletionUserMessageParam,
39
40
  Choice,
40
41
  CompletionUsage,
42
+ NotGiven,
41
43
  )
44
+ from .unified_model_type import UnifiedModelType
42
45
 
43
46
  __all__ = [
44
47
  'RoleType',
@@ -59,11 +62,13 @@ __all__ = [
59
62
  'ChatCompletionAssistantMessageParam',
60
63
  'ChatCompletionFunctionMessageParam',
61
64
  'CompletionUsage',
62
- 'OpenAIVideoType',
63
65
  'OpenAIImageType',
64
66
  'OpenAIVisionDetailType',
65
67
  'OpenAPIName',
66
68
  'ModelPlatformType',
67
69
  'AudioModelType',
68
70
  'VoiceType',
71
+ 'UnifiedModelType',
72
+ 'NOT_GIVEN',
73
+ 'NotGiven',
69
74
  ]
camel/types/enums.py CHANGED
@@ -11,8 +11,10 @@
11
11
  # See the License for the specific language governing permissions and
12
12
  # limitations under the License.
13
13
  # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
- import re
15
14
  from enum import Enum, EnumMeta
15
+ from typing import cast
16
+
17
+ from camel.types.unified_model_type import UnifiedModelType
16
18
 
17
19
 
18
20
  class RoleType(Enum):
@@ -23,7 +25,9 @@ class RoleType(Enum):
23
25
  DEFAULT = "default"
24
26
 
25
27
 
26
- class ModelType(Enum):
28
+ class ModelType(UnifiedModelType, Enum):
29
+ DEFAULT = "gpt-4o-mini"
30
+
27
31
  GPT_3_5_TURBO = "gpt-3.5-turbo"
28
32
  GPT_4 = "gpt-4"
29
33
  GPT_4_TURBO = "gpt-4-turbo"
@@ -33,7 +37,6 @@ class ModelType(Enum):
33
37
  O1_MINI = "o1-mini"
34
38
 
35
39
  GLM_4 = "glm-4"
36
- GLM_4_OPEN_SOURCE = "glm-4-open-source"
37
40
  GLM_4V = 'glm-4v'
38
41
  GLM_3_TURBO = "glm-3-turbo"
39
42
 
@@ -48,13 +51,6 @@ class ModelType(Enum):
48
51
 
49
52
  STUB = "stub"
50
53
 
51
- LLAMA_2 = "llama-2"
52
- LLAMA_3 = "llama-3"
53
- VICUNA = "vicuna"
54
- VICUNA_16K = "vicuna-16k"
55
-
56
- QWEN_2 = "qwen-2"
57
-
58
54
  # Legacy anthropic models
59
55
  # NOTE: anthropic legacy models only Claude 2.1 has system prompt support
60
56
  CLAUDE_2_1 = "claude-2.1"
@@ -75,19 +71,28 @@ class ModelType(Enum):
75
71
  GEMINI_1_5_PRO = "gemini-1.5-pro"
76
72
 
77
73
  # Mistral AI models
78
- MISTRAL_LARGE = "mistral-large-latest"
79
- MISTRAL_NEMO = "open-mistral-nemo"
80
- MISTRAL_CODESTRAL = "codestral-latest"
74
+ MISTRAL_3B = "ministral-3b-latest"
81
75
  MISTRAL_7B = "open-mistral-7b"
76
+ MISTRAL_8B = "ministral-8b-latest"
77
+ MISTRAL_CODESTRAL = "codestral-latest"
78
+ MISTRAL_CODESTRAL_MAMBA = "open-codestral-mamba"
79
+ MISTRAL_LARGE = "mistral-large-latest"
82
80
  MISTRAL_MIXTRAL_8x7B = "open-mixtral-8x7b"
83
81
  MISTRAL_MIXTRAL_8x22B = "open-mixtral-8x22b"
84
- MISTRAL_CODESTRAL_MAMBA = "open-codestral-mamba"
82
+ MISTRAL_NEMO = "open-mistral-nemo"
83
+ MISTRAL_PIXTRAL_12B = "pixtral-12b-2409"
85
84
 
86
85
  # Reka models
87
86
  REKA_CORE = "reka-core"
88
87
  REKA_FLASH = "reka-flash"
89
88
  REKA_EDGE = "reka-edge"
90
89
 
90
+ def __str__(self):
91
+ return self.value
92
+
93
+ def __new__(cls, value) -> "ModelType":
94
+ return cast("ModelType", UnifiedModelType.__new__(cls, value))
95
+
91
96
  @property
92
97
  def value_for_tiktoken(self) -> str:
93
98
  if self.is_openai:
@@ -95,7 +100,7 @@ class ModelType(Enum):
95
100
  return "gpt-4o-mini"
96
101
 
97
102
  @property
98
- def supports_tool_calling(self) -> bool:
103
+ def support_native_tool_calling(self) -> bool:
99
104
  return any([self.is_openai, self.is_gemini, self.is_mistral])
100
105
 
101
106
  @property
@@ -132,18 +137,6 @@ class ModelType(Enum):
132
137
  ModelType.GLM_4V,
133
138
  }
134
139
 
135
- @property
136
- def is_open_source(self) -> bool:
137
- r"""Returns whether this type of models is open-source."""
138
- return self in {
139
- ModelType.LLAMA_2,
140
- ModelType.LLAMA_3,
141
- ModelType.QWEN_2,
142
- ModelType.GLM_4_OPEN_SOURCE,
143
- ModelType.VICUNA,
144
- ModelType.VICUNA_16K,
145
- }
146
-
147
140
  @property
148
141
  def is_anthropic(self) -> bool:
149
142
  r"""Returns whether this type of models is Anthropic-released model.
@@ -186,6 +179,9 @@ class ModelType(Enum):
186
179
  ModelType.MISTRAL_MIXTRAL_8x7B,
187
180
  ModelType.MISTRAL_MIXTRAL_8x22B,
188
181
  ModelType.MISTRAL_CODESTRAL_MAMBA,
182
+ ModelType.MISTRAL_PIXTRAL_12B,
183
+ ModelType.MISTRAL_8B,
184
+ ModelType.MISTRAL_3B,
189
185
  }
190
186
 
191
187
  @property
@@ -206,7 +202,10 @@ class ModelType(Enum):
206
202
  Returns:
207
203
  bool: Whether this type of models is gemini.
208
204
  """
209
- return self in {ModelType.GEMINI_1_5_FLASH, ModelType.GEMINI_1_5_PRO}
205
+ return self in {
206
+ ModelType.GEMINI_1_5_FLASH,
207
+ ModelType.GEMINI_1_5_PRO,
208
+ }
210
209
 
211
210
  @property
212
211
  def is_reka(self) -> bool:
@@ -230,11 +229,7 @@ class ModelType(Enum):
230
229
  """
231
230
  if self is ModelType.GLM_4V:
232
231
  return 1024
233
- elif self is ModelType.VICUNA:
234
- # reference: https://lmsys.org/blog/2023-03-30-vicuna/
235
- return 2048
236
232
  elif self in {
237
- ModelType.LLAMA_2,
238
233
  ModelType.NEMOTRON_4_REWARD,
239
234
  ModelType.STUB,
240
235
  ModelType.REKA_CORE,
@@ -248,15 +243,12 @@ class ModelType(Enum):
248
243
  ModelType.GROQ_LLAMA_3_70B,
249
244
  ModelType.GROQ_GEMMA_7B_IT,
250
245
  ModelType.GROQ_GEMMA_2_9B_IT,
251
- ModelType.LLAMA_3,
252
246
  ModelType.GLM_3_TURBO,
253
247
  ModelType.GLM_4,
254
- ModelType.GLM_4_OPEN_SOURCE,
255
248
  }:
256
249
  return 8_192
257
250
  elif self in {
258
251
  ModelType.GPT_3_5_TURBO,
259
- ModelType.VICUNA_16K,
260
252
  }:
261
253
  return 16_384
262
254
  elif self in {
@@ -268,7 +260,10 @@ class ModelType(Enum):
268
260
  return 32_768
269
261
  elif self in {ModelType.MISTRAL_MIXTRAL_8x22B}:
270
262
  return 64_000
271
- elif self in {ModelType.CLAUDE_2_0, ModelType.CLAUDE_INSTANT_1_2}:
263
+ elif self in {
264
+ ModelType.CLAUDE_2_0,
265
+ ModelType.CLAUDE_INSTANT_1_2,
266
+ }:
272
267
  return 100_000
273
268
  elif self in {
274
269
  ModelType.GPT_4O,
@@ -278,7 +273,9 @@ class ModelType(Enum):
278
273
  ModelType.O1_MINI,
279
274
  ModelType.MISTRAL_LARGE,
280
275
  ModelType.MISTRAL_NEMO,
281
- ModelType.QWEN_2,
276
+ ModelType.MISTRAL_PIXTRAL_12B,
277
+ ModelType.MISTRAL_8B,
278
+ ModelType.MISTRAL_3B,
282
279
  }:
283
280
  return 128_000
284
281
  elif self in {
@@ -299,48 +296,14 @@ class ModelType(Enum):
299
296
  ModelType.MISTRAL_CODESTRAL_MAMBA,
300
297
  }:
301
298
  return 256_000
302
- elif self in {ModelType.GEMINI_1_5_FLASH, ModelType.GEMINI_1_5_PRO}:
299
+ elif self in {
300
+ ModelType.GEMINI_1_5_FLASH,
301
+ ModelType.GEMINI_1_5_PRO,
302
+ }:
303
303
  return 1_048_576
304
304
  else:
305
305
  raise ValueError("Unknown model type")
306
306
 
307
- def validate_model_name(self, model_name: str) -> bool:
308
- r"""Checks whether the model type and the model name matches.
309
-
310
- Args:
311
- model_name (str): The name of the model, e.g. "vicuna-7b-v1.5".
312
-
313
- Returns:
314
- bool: Whether the model type matches the model name.
315
- """
316
- if self is ModelType.VICUNA:
317
- pattern = r'^vicuna-\d+b-v\d+\.\d+$'
318
- return bool(re.match(pattern, model_name))
319
- elif self is ModelType.VICUNA_16K:
320
- pattern = r'^vicuna-\d+b-v\d+\.\d+-16k$'
321
- return bool(re.match(pattern, model_name))
322
- elif self is ModelType.LLAMA_2:
323
- return (
324
- self.value in model_name.lower()
325
- or "llama2" in model_name.lower()
326
- )
327
- elif self is ModelType.LLAMA_3:
328
- return (
329
- self.value in model_name.lower()
330
- or "llama3" in model_name.lower()
331
- )
332
- elif self is ModelType.QWEN_2:
333
- return (
334
- self.value in model_name.lower()
335
- or "qwen2" in model_name.lower()
336
- )
337
- elif self is ModelType.GLM_4_OPEN_SOURCE:
338
- return (
339
- 'glm-4' in model_name.lower() or "glm4" in model_name.lower()
340
- )
341
- else:
342
- return self.value in model_name.lower()
343
-
344
307
 
345
308
  class EmbeddingModelType(Enum):
346
309
  TEXT_EMBEDDING_ADA_2 = "text-embedding-ada-002"
@@ -466,21 +429,21 @@ class OpenAPIName(Enum):
466
429
 
467
430
 
468
431
  class ModelPlatformType(Enum):
432
+ DEFAULT = "openai"
433
+
469
434
  OPENAI = "openai"
470
435
  AZURE = "azure"
471
436
  ANTHROPIC = "anthropic"
472
437
  GROQ = "groq"
473
- OPEN_SOURCE = "open-source"
474
438
  OLLAMA = "ollama"
475
439
  LITELLM = "litellm"
476
440
  ZHIPU = "zhipuai"
477
- DEFAULT = "default"
478
441
  GEMINI = "gemini"
479
442
  VLLM = "vllm"
480
443
  MISTRAL = "mistral"
481
444
  REKA = "reka"
482
445
  TOGETHER = "together"
483
- OPENAI_COMPATIBILITY_MODEL = "openai-compatibility-model"
446
+ OPENAI_COMPATIBLE_MODEL = "openai-compatible-model"
484
447
  SAMBA = "samba-nova"
485
448
 
486
449
  @property
@@ -534,15 +497,10 @@ class ModelPlatformType(Enum):
534
497
  return self is ModelPlatformType.MISTRAL
535
498
 
536
499
  @property
537
- def is_open_source(self) -> bool:
538
- r"""Returns whether this platform is opensource."""
539
- return self is ModelPlatformType.OPEN_SOURCE
540
-
541
- @property
542
- def is_openai_compatibility_model(self) -> bool:
500
+ def is_openai_compatible_model(self) -> bool:
543
501
  r"""Returns whether this is a platform supporting openai
544
502
  compatibility"""
545
- return self is ModelPlatformType.OPENAI_COMPATIBILITY_MODEL
503
+ return self is ModelPlatformType.OPENAI_COMPATIBLE_MODEL
546
504
 
547
505
  @property
548
506
  def is_gemini(self) -> bool:
@@ -31,6 +31,7 @@ from openai.types.chat.chat_completion_user_message_param import (
31
31
  ChatCompletionUserMessageParam,
32
32
  )
33
33
  from openai.types.completion_usage import CompletionUsage
34
+ from openai._types import NOT_GIVEN, NotGiven
34
35
 
35
36
  Choice = Choice
36
37
  ChatCompletion = ChatCompletion
@@ -42,3 +43,5 @@ ChatCompletionUserMessageParam = ChatCompletionUserMessageParam
42
43
  ChatCompletionAssistantMessageParam = ChatCompletionAssistantMessageParam
43
44
  ChatCompletionFunctionMessageParam = ChatCompletionFunctionMessageParam
44
45
  CompletionUsage = CompletionUsage
46
+ NOT_GIVEN = NOT_GIVEN
47
+ NotGiven = NotGiven
@@ -0,0 +1,104 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ import logging
15
+ from threading import Lock
16
+ from typing import TYPE_CHECKING, ClassVar, Dict, Union, cast
17
+
18
+ if TYPE_CHECKING:
19
+ from camel.types import ModelType
20
+
21
+
22
+ class UnifiedModelType(str):
23
+ r"""Class used for support both :obj:`ModelType` and :obj:`str` to be used
24
+ to represent a model type in a unified way. This class is a subclass of
25
+ :obj:`str` so that it can be used as string seamlessly.
26
+
27
+ Args:
28
+ value (Union[ModelType, str]): The value of the model type.
29
+ """
30
+
31
+ _cache: ClassVar[Dict[str, "UnifiedModelType"]] = {}
32
+ _lock: ClassVar[Lock] = Lock()
33
+
34
+ def __new__(cls, value: Union["ModelType", str]) -> "UnifiedModelType":
35
+ with cls._lock:
36
+ if value not in cls._cache:
37
+ instance = super().__new__(cls, value)
38
+ cls._cache[value] = cast(UnifiedModelType, instance)
39
+ else:
40
+ instance = cls._cache[value]
41
+ return instance
42
+
43
+ def __init__(self, value: Union["ModelType", str]) -> None:
44
+ pass
45
+
46
+ @property
47
+ def value_for_tiktoken(self) -> str:
48
+ r"""Returns the model name for TikToken."""
49
+ return "gpt-4o-mini"
50
+
51
+ @property
52
+ def token_limit(self) -> int:
53
+ r"""Returns the token limit for the model. Here we set the default
54
+ value as `999_999_999` if it's not provided from `model_config_dict`"""
55
+ logging.warning(
56
+ "Invalid or missing `max_tokens` in `model_config_dict`. "
57
+ "Defaulting to 999_999_999 tokens."
58
+ )
59
+ return 999_999_999
60
+
61
+ @property
62
+ def is_openai(self) -> bool:
63
+ r"""Returns whether the model is an OpenAI model."""
64
+ return True
65
+
66
+ @property
67
+ def is_anthropic(self) -> bool:
68
+ r"""Returns whether the model is an Anthropic model."""
69
+ return True
70
+
71
+ @property
72
+ def is_azure_openai(self) -> bool:
73
+ r"""Returns whether the model is an Azure OpenAI model."""
74
+ return True
75
+
76
+ @property
77
+ def is_groq(self) -> bool:
78
+ r"""Returns whether the model is a Groq served model."""
79
+ return True
80
+
81
+ @property
82
+ def is_zhipuai(self) -> bool:
83
+ r"""Returns whether the model is a Zhipuai model."""
84
+ return True
85
+
86
+ @property
87
+ def is_gemini(self) -> bool:
88
+ r"""Returns whether the model is a Gemini model."""
89
+ return True
90
+
91
+ @property
92
+ def is_mistral(self) -> bool:
93
+ r"""Returns whether the model is a Mistral model."""
94
+ return True
95
+
96
+ @property
97
+ def is_reka(self) -> bool:
98
+ r"""Returns whether the model is a Reka model."""
99
+ return True
100
+
101
+ @property
102
+ def support_native_tool_calling(self) -> bool:
103
+ r"""Returns whether the model supports native tool calling."""
104
+ return False
camel/utils/__init__.py CHANGED
@@ -43,7 +43,6 @@ from .token_counting import (
43
43
  LiteLLMTokenCounter,
44
44
  MistralTokenCounter,
45
45
  OpenAITokenCounter,
46
- OpenSourceTokenCounter,
47
46
  get_model_encoding,
48
47
  )
49
48
 
@@ -60,7 +59,6 @@ __all__ = [
60
59
  'get_model_encoding',
61
60
  'BaseTokenCounter',
62
61
  'OpenAITokenCounter',
63
- 'OpenSourceTokenCounter',
64
62
  'LiteLLMTokenCounter',
65
63
  'Constants',
66
64
  'text_extract_from_web',
camel/utils/async_func.py CHANGED
@@ -14,20 +14,20 @@
14
14
  import asyncio
15
15
  from copy import deepcopy
16
16
 
17
- from camel.toolkits import OpenAIFunction
17
+ from camel.toolkits import FunctionTool
18
18
 
19
19
 
20
- def sync_funcs_to_async(funcs: list[OpenAIFunction]) -> list[OpenAIFunction]:
20
+ def sync_funcs_to_async(funcs: list[FunctionTool]) -> list[FunctionTool]:
21
21
  r"""Convert a list of Python synchronous functions to Python
22
22
  asynchronous functions.
23
23
 
24
24
  Args:
25
- funcs (list[OpenAIFunction]): List of Python synchronous
26
- functions in the :obj:`OpenAIFunction` format.
25
+ funcs (list[FunctionTool]): List of Python synchronous
26
+ functions in the :obj:`FunctionTool` format.
27
27
 
28
28
  Returns:
29
- list[OpenAIFunction]: List of Python asynchronous functions
30
- in the :obj:`OpenAIFunction` format.
29
+ list[FunctionTool]: List of Python asynchronous functions
30
+ in the :obj:`FunctionTool` format.
31
31
  """
32
32
  async_funcs = []
33
33
  for func in funcs:
@@ -37,6 +37,6 @@ def sync_funcs_to_async(funcs: list[OpenAIFunction]) -> list[OpenAIFunction]:
37
37
  return asyncio.to_thread(sync_func, *args, **kwargs) # noqa: B023
38
38
 
39
39
  async_funcs.append(
40
- OpenAIFunction(async_callable, deepcopy(func.openai_tool_schema))
40
+ FunctionTool(async_callable, deepcopy(func.openai_tool_schema))
41
41
  )
42
42
  return async_funcs