camel-ai 0.1.5.2__py3-none-any.whl → 0.1.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of camel-ai might be problematic.

camel/societies/role_playing.py CHANGED
@@ -22,9 +22,10 @@ from camel.agents import (
  from camel.generators import SystemMessageGenerator
  from camel.human import Human
  from camel.messages import BaseMessage
+ from camel.models import BaseModelBackend
  from camel.prompts import TextPrompt
  from camel.responses import ChatAgentResponse
- from camel.types import ModelType, RoleType, TaskType
+ from camel.types import RoleType, TaskType


  class RolePlaying:
@@ -48,9 +49,9 @@ class RolePlaying:
  in the loop. (default: :obj:`False`)
  critic_criteria (str, optional): Critic criteria for the critic agent.
  If not specified, set the criteria to improve task performance.
- model_type (ModelType, optional): Model type that will be used for
- role playing. If specified, it will override the model in all
- agents. (default: :obj:`None`)
+ model (BaseModelBackend, optional): The model backend to use for
+ generating responses. If specified, it will override the model in
+ all agents. (default: :obj:`None`)
  task_type (TaskType, optional): The type of task to perform.
  (default: :obj:`TaskType.AI_SOCIETY`)
  assistant_agent_kwargs (Dict, optional): Additional arguments to pass
@@ -84,7 +85,7 @@ class RolePlaying:
  with_task_planner: bool = False,
  with_critic_in_the_loop: bool = False,
  critic_criteria: Optional[str] = None,
- model_type: Optional[ModelType] = None,
+ model: Optional[BaseModelBackend] = None,
  task_type: TaskType = TaskType.AI_SOCIETY,
  assistant_agent_kwargs: Optional[Dict] = None,
  user_agent_kwargs: Optional[Dict] = None,
@@ -99,7 +100,7 @@ class RolePlaying:
  self.with_task_specify = with_task_specify
  self.with_task_planner = with_task_planner
  self.with_critic_in_the_loop = with_critic_in_the_loop
- self.model_type = model_type
+ self.model = model
  self.task_type = task_type
  self.task_prompt = task_prompt

@@ -189,12 +190,10 @@ class RolePlaying:
  )
  )
  task_specify_meta_dict.update(extend_task_specify_meta_dict or {})
- if self.model_type is not None:
+ if self.model is not None:
  if task_specify_agent_kwargs is None:
  task_specify_agent_kwargs = {}
- task_specify_agent_kwargs.update(
- dict(model_type=self.model_type)
- )
+ task_specify_agent_kwargs.update(dict(model=self.model))
  task_specify_agent = TaskSpecifyAgent(
  task_type=self.task_type,
  output_language=output_language,
@@ -224,12 +223,10 @@ class RolePlaying:
  agents. (default: :obj:`None`)
  """
  if self.with_task_planner:
- if self.model_type is not None:
+ if self.model is not None:
  if task_planner_agent_kwargs is None:
  task_planner_agent_kwargs = {}
- task_planner_agent_kwargs.update(
- dict(model_type=self.model_type)
- )
+ task_planner_agent_kwargs.update(dict(model=self.model))
  task_planner_agent = TaskPlannerAgent(
  output_language=output_language,
  **(task_planner_agent_kwargs or {}),
@@ -321,13 +318,13 @@ class RolePlaying:
  output_language (str, optional): The language to be output by the
  agents. (default: :obj:`None`)
  """
- if self.model_type is not None:
+ if self.model is not None:
  if assistant_agent_kwargs is None:
  assistant_agent_kwargs = {}
- assistant_agent_kwargs.update(dict(model_type=self.model_type))
+ assistant_agent_kwargs.update(dict(model=self.model))
  if user_agent_kwargs is None:
  user_agent_kwargs = {}
- user_agent_kwargs.update(dict(model_type=self.model_type))
+ user_agent_kwargs.update(dict(model=self.model))

  self.assistant_agent = ChatAgent(
  init_assistant_sys_msg,
@@ -383,10 +380,10 @@ class RolePlaying:
  critic_msg_meta_dict,
  role_tuple=(critic_role_name, RoleType.CRITIC),
  )
- if self.model_type is not None:
+ if self.model is not None:
  if critic_kwargs is None:
  critic_kwargs = {}
- critic_kwargs.update(dict(model_type=self.model_type))
+ critic_kwargs.update(dict(model=self.model))
  self.critic = CriticAgent(
  self.critic_sys_msg,
  **(critic_kwargs or {}),
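The net effect of these hunks is that `RolePlaying` now takes a ready-made `BaseModelBackend` instance via `model=...` instead of a `ModelType` enum value, and forwards it to every sub-agent it creates. A minimal sketch of what a caller might look like on 0.1.5.3, reusing the Ollama backend from the README example further down in this diff; the role-name keyword arguments are assumptions taken from earlier CAMEL examples and are not part of this diff:

```python
from camel.models import ModelFactory
from camel.societies import RolePlaying
from camel.types import ModelPlatformType

# Build a model backend (parameters mirror the README example in this diff).
model = ModelFactory.create(
    model_platform=ModelPlatformType.OLLAMA,
    model_type="llama3",
    url="http://localhost:11434/v1",
    model_config_dict={"temperature": 0.4},
)

# 0.1.5.2 passed `model_type=ModelType...`; 0.1.5.3 passes a backend instance,
# which overrides the model in the assistant, user, task and critic agents.
session = RolePlaying(
    assistant_role_name="Python Programmer",  # role-name kwargs assumed,
    user_role_name="Stock Trader",            # not shown in this diff
    task_prompt="Develop a trading bot for the stock market",
    model=model,
)
```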
camel/types/__init__.py CHANGED
@@ -14,6 +14,7 @@
  from .enums import (
  AudioModelType,
  EmbeddingModelType,
+ ModelPlatformType,
  ModelType,
  OpenAIBackendRole,
  OpenAIImageType,
@@ -62,6 +63,7 @@ __all__ = [
  'OpenAIImageType',
  'OpenAIVisionDetailType',
  'OpenAPIName',
+ 'ModelPlatformType',
  'AudioModelType',
  'VoiceType',
  ]
camel/types/enums.py CHANGED
@@ -45,14 +45,21 @@ class ModelType(Enum):
  CLAUDE_2_0 = "claude-2.0"
  CLAUDE_INSTANT_1_2 = "claude-instant-1.2"

- # 3 models
+ # Claude3 models
  CLAUDE_3_OPUS = "claude-3-opus-20240229"
  CLAUDE_3_SONNET = "claude-3-sonnet-20240229"
  CLAUDE_3_HAIKU = "claude-3-haiku-20240307"

+ # Nvidia models
+ NEMOTRON_4_REWARD = "nvidia/nemotron-4-340b-reward"
+
  @property
  def value_for_tiktoken(self) -> str:
- return self.value if self is not ModelType.STUB else "gpt-3.5-turbo"
+ return (
+ self.value
+ if self is not ModelType.STUB and not isinstance(self, str)
+ else "gpt-3.5-turbo"
+ )

  @property
  def is_openai(self) -> bool:
@@ -99,6 +106,17 @@ class ModelType(Enum):
  ModelType.CLAUDE_3_HAIKU,
  }

+ @property
+ def is_nvidia(self) -> bool:
+ r"""Returns whether this type of models is Nvidia-released model.
+
+ Returns:
+ bool: Whether this type of models is nvidia.
+ """
+ return self in {
+ ModelType.NEMOTRON_4_REWARD,
+ }
+
  @property
  def token_limit(self) -> int:
  r"""Returns the maximum token limit for a given model.
@@ -130,7 +148,7 @@ class ModelType(Enum):
  return 2048
  elif self is ModelType.VICUNA_16K:
  return 16384
- if self in {ModelType.CLAUDE_2_0, ModelType.CLAUDE_INSTANT_1_2}:
+ elif self in {ModelType.CLAUDE_2_0, ModelType.CLAUDE_INSTANT_1_2}:
  return 100_000
  elif self in {
  ModelType.CLAUDE_2_1,
@@ -139,6 +157,8 @@ class ModelType(Enum):
  ModelType.CLAUDE_3_HAIKU,
  }:
  return 200_000
+ elif self is ModelType.NEMOTRON_4_REWARD:
+ return 4096
  else:
  raise ValueError("Unknown model type")

@@ -207,6 +227,7 @@ class TaskType(Enum):
  EVALUATION = "evaluation"
  SOLUTION_EXTRACTION = "solution_extraction"
  ROLE_DESCRIPTION = "role_description"
+ GENERATE_TEXT_EMBEDDING_DATA = "generate_text_embedding_data"
  OBJECT_RECOGNITION = "object_recognition"
  DEFAULT = "default"
  VIDEO_DESCRIPTION = "video_description"
@@ -279,6 +300,52 @@ class OpenAPIName(Enum):
  WEB_SCRAPER = "web_scraper"


+ class ModelPlatformType(Enum):
+ OPENAI = "openai"
+ AZURE = "azure"
+ ANTHROPIC = "anthropic"
+ OPENSOURCE = "opensource"
+ OLLAMA = "ollama"
+ LITELLM = "litellm"
+ ZHIPU = "zhipuai"
+ DEFAULT = "default"
+
+ @property
+ def is_openai(self) -> bool:
+ r"""Returns whether this platform is openai."""
+ return self is ModelPlatformType.OPENAI
+
+ @property
+ def is_azure(self) -> bool:
+ r"""Returns whether this platform is azure."""
+ return self is ModelPlatformType.AZURE
+
+ @property
+ def is_anthropic(self) -> bool:
+ r"""Returns whether this platform is anthropic."""
+ return self is ModelPlatformType.ANTHROPIC
+
+ @property
+ def is_ollama(self) -> bool:
+ r"""Returns whether this platform is ollama."""
+ return self is ModelPlatformType.OLLAMA
+
+ @property
+ def is_litellm(self) -> bool:
+ r"""Returns whether this platform is litellm."""
+ return self is ModelPlatformType.LITELLM
+
+ @property
+ def is_zhipuai(self) -> bool:
+ r"""Returns whether this platform is zhipu."""
+ return self is ModelPlatformType.ZHIPU
+
+ @property
+ def is_open_source(self) -> bool:
+ r"""Returns whether this platform is opensource."""
+ return self is ModelPlatformType.OPENSOURCE
+
+
  class AudioModelType(Enum):
  TTS_1 = "tts-1"
  TTS_1_HD = "tts-1-hd"
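Taken together, the enums.py changes add one Nvidia model type and a new `ModelPlatformType` enum that groups backends by platform. A small illustration using only the members and properties introduced above (the printed values follow directly from the diff):

```python
from camel.types import ModelPlatformType, ModelType

nemotron = ModelType.NEMOTRON_4_REWARD
print(nemotron.value)        # "nvidia/nemotron-4-340b-reward"
print(nemotron.is_nvidia)    # True, via the new is_nvidia property
print(nemotron.token_limit)  # 4096, via the new token_limit branch

platform = ModelPlatformType.OLLAMA
print(platform.value)        # "ollama"
print(platform.is_ollama)    # True
print(platform.is_openai)    # False
```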
camel/utils/commons.py CHANGED
@@ -62,6 +62,10 @@ def model_api_key_required(func: F) -> F:
  if not self._api_key and 'ANTHROPIC_API_KEY' not in os.environ:
  raise ValueError('Anthropic API key not found.')
  return func(self, *args, **kwargs)
+ elif self.model_type.is_nvidia:
+ if not self._api_key and 'NVIDIA_API_KEY' not in os.environ:
+ raise ValueError('NVIDIA API key not found.')
+ return func(self, *args, **kwargs)
  else:
  raise ValueError('Unsupported model type.')
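With this branch, the `model_api_key_required` decorator also accepts Nvidia model types, provided an API key is supplied explicitly or through the environment. A minimal way to satisfy the new check (the placeholder key is illustrative only):

```python
import os

# The decorator above only looks for NVIDIA_API_KEY in os.environ (or an
# explicitly provided self._api_key) when self.model_type.is_nvidia is True.
os.environ.setdefault("NVIDIA_API_KEY", "<your-nvidia-api-key>")
```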
 
camel/utils/token_counting.py CHANGED
@@ -296,9 +296,11 @@ class AnthropicTokenCounter(BaseTokenCounter):
  Returns:
  int: Number of tokens in the messages.
  """
- prompt = messages_to_prompt(messages, self.model_type)
-
- return self.client.count_tokens(prompt)
+ num_tokens = 0
+ for message in messages:
+ content = str(message["content"])
+ num_tokens += self.client.count_tokens(content)
+ return num_tokens


  class LiteLLMTokenCounter:
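The Anthropic token counter no longer flattens the message list into a single prompt via `messages_to_prompt`; it now counts each message's content separately and sums the results. A self-contained sketch of the new logic, with a stand-in callable in place of the Anthropic client's `count_tokens`:

```python
from typing import Callable, Dict, List


def count_message_tokens(
    messages: List[Dict[str, str]],
    count_tokens: Callable[[str], int],
) -> int:
    # Mirrors the replacement code above: per-message counts, then a sum.
    return sum(count_tokens(str(message["content"])) for message in messages)


# Toy usage with a whitespace tokenizer standing in for the real client:
messages = [
    {"role": "user", "content": "Say hi to CAMEL"},
    {"role": "assistant", "content": "Hi!"},
]
print(count_message_tokens(messages, lambda text: len(text.split())))  # 5
```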
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: camel-ai
- Version: 0.1.5.2
+ Version: 0.1.5.3
  Summary: Communicative Agents for AI Society Study
  Home-page: https://www.camel-ai.org/
  License: Apache-2.0
@@ -186,7 +186,7 @@ conda create --name camel python=3.10
  conda activate camel

  # Clone github repo
- git clone -b v0.1.5.2 https://github.com/camel-ai/camel.git
+ git clone -b v0.1.5.3 https://github.com/camel-ai/camel.git

  # Change directory into project directory
  cd camel
@@ -246,54 +246,67 @@ python examples/ai_society/role_playing.py
  Please note that the environment variable is session-specific. If you open a new terminal window or tab, you will need to set the API key again in that new session.


- ## Use Open-Source Models as Backends
+ ## Use Open-Source Models as Backends (ex. using Ollama to set Llama 3 locally)

- The basic workflow of using an open-sourced model as the backend is based on an external server running LLM inference service, e.g. during the development we chose [FastChat](https://github.com/lm-sys/FastChat) to run the service.
-
- We do not fix the choice of server to decouple the implementation of any specific LLM inference server with CAMEL (indicating the server needs to be deployed by the user himself). But the server to be deployed must satisfy that **it supports OpenAI-compatible APIs, especially the method `openai.ChatCompletion.create`**.
-
- Here are some instructions for enabling open-source backends, where we use the [FastChat](https://github.com/lm-sys/FastChat) and a LLaMA2-based model ([`meta-llama/Llama-2-7b-chat-hf`](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)) in the example. Please install FastChat in advance following their installation guidance.
-
- 1. Before running CAMEL, we should firstly launch FastChat server following the guidance on https://github.com/lm-sys/FastChat/blob/main/docs/openai_api.md. The instructions summarized below should be kept running **in separate processes**:
-
- ```sh
- # Launch the controller
- python -m fastchat.serve.controller
-
- # Launch the model worker(s)
- python3 -m fastchat.serve.model_worker --model-path meta-llama/Llama-2-7b-chat-hf
-
- # Launch the RESTful API server
- python3 -m fastchat.serve.openai_api_server --host localhost --port 8000
- ```
+ - Download [Ollama](https://ollama.com/download).
+ - After setting up Ollama, pull the Llama3 model by typing the following command into the terminal:
+ ```bash
+ ollama pull llama3
+ ```
+ - Create a ModelFile similar the one below in your project directory.
+ ```bash
+ FROM llama3

- 2. After observing the controller successfully receiving the heart beat signal from the worker, the server should be ready for use at http://localhost:8000/v1.
+ # Set parameters
+ PARAMETER temperature 0.8
+ PARAMETER stop Result

- 3. Then we can try on running `role_playing_with_open_source_model.py`, where each agent in this example is initialized with specifying the `model_path` and `server_url`, similar to the example code below:
+ # Sets a custom system message to specify the behavior of the chat assistant

- ```python
- system_message = # ...
+ # Leaving it blank for now.

- agent_kwargs = dict(
- model=model_type,
- model_config=OpenSourceConfig(
- model_path="meta-llama/Llama-2-7b-chat-hf",
- server_url="http://localhost:8000/v1",
- ),
- )
+ SYSTEM """ """
+ ```
+ - Create a script to get the base model (llama3) and create a custom model using the ModelFile above. Save this as a .sh file:
+ ```bash
+ #!/bin/zsh

- agent = ChatAgent(
- system_message,
- **agent_kwargs,
- )
- ```
+ # variables
+ model_name="llama3"
+ custom_model_name="camel-llama3"

- ### Supported Models
+ #get the base model
+ ollama pull $model_name

- - LLaMA2-based models
- - example: [meta-llama/Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)
- - Vicuna-based models
- - example: [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5)
+ #create the model file
+ ollama create $custom_model_name -f ./Llama3ModelFile
+ ```
+ - Navigate to the directory where the script and ModelFile are located and run the script. Enjoy your Llama3 model, enhanced by CAMEL's excellent agents.
+ ```python
+ from camel.agents import ChatAgent
+ from camel.messages import BaseMessage
+ from camel.models import ModelFactory
+ from camel.types import ModelPlatformType
+
+ ollama_model = ModelFactory.create(
+ model_platform=ModelPlatformType.OLLAMA,
+ model_type="llama3",
+ url="http://localhost:11434/v1",
+ model_config_dict={"temperature": 0.4},
+ )
+
+ assistant_sys_msg = BaseMessage.make_assistant_message(
+ role_name="Assistant",
+ content="You are a helpful assistant.",
+ )
+ agent = ChatAgent(assistant_sys_msg, model=ollama_model, token_limit=4096)
+
+ user_msg = BaseMessage.make_user_message(
+ role_name="User", content="Say hi to CAMEL"
+ )
+ assistant_response = agent.step(user_msg)
+ print(assistant_response.msg.content)
+ ```

  ## Data (Hosted on Hugging Face)
  | Dataset | Chat format | Instruction format | Chat format (translated) |
@@ -1,14 +1,14 @@
  camel/__init__.py,sha256=9lSkzCVy_evUPseFTI7wD_oMEn6wRo9aRZna06ygz5I,778
  camel/agents/__init__.py,sha256=SSU1wbhZXWwQnE0rRxkpyN57kEu72KklsZNcdLkXfTs,1551
  camel/agents/base.py,sha256=X39qWSiT1WnDqaJ9k3gQrTpOQSwUKzNEVpp5AY6fDH8,1130
- camel/agents/chat_agent.py,sha256=-CZICCtQbsN-hDxZeFc45m-GU9UTWRQWtmv_q0cc5kQ,27713
- camel/agents/critic_agent.py,sha256=Etxti9XKOut_KqMQHI8IKNMg8zUUM13trep7axZK0Qs,7377
- camel/agents/deductive_reasoner_agent.py,sha256=xuRXe5JkrvU-Qs8qkAQ9MoUYaxOYQ51o9MYw89g0DLA,13309
- camel/agents/embodied_agent.py,sha256=E0N63uOkfw02MdPTEX_ImUAzaNuapnPyU3iBJQSqeKU,7322
- camel/agents/knowledge_graph_agent.py,sha256=JCNn2XVcQ-lnx6FF9riWAXACDmwIr-cNpJmv9Aa-V9k,8890
- camel/agents/role_assignment_agent.py,sha256=npj0l7mhEVVVk3f1ryPlfy_mOeWZi4wyPOi1UpWgLoY,4910
- camel/agents/search_agent.py,sha256=FPVZTwxTAy0KqidVMxM8J-XDGiP0uQAz4ie-O0_KVlI,4442
- camel/agents/task_agent.py,sha256=Dip1nAd3oGtxVIi3laP65hOp5VwlajAbtotendtNMvs,14907
+ camel/agents/chat_agent.py,sha256=yeSSVTnKbRmA7DUFv2waaPz3rMM6z-Gevi0DoYQ8-Uo,27687
+ camel/agents/critic_agent.py,sha256=M3XNxRS0wAs5URjc_0kvtXqUlD-KpXq3L5ADz-KCKGU,7199
+ camel/agents/deductive_reasoner_agent.py,sha256=8R9hY_yCr_guq_ySuIE3eaYbiPeHVrsh6HKqIWrR0zY,13180
+ camel/agents/embodied_agent.py,sha256=Mm2-wvcpduXOvsHMBcavroACyvK06Mxe6QYTf80tdfI,7160
+ camel/agents/knowledge_graph_agent.py,sha256=WvKee0ja-vqB4L38pu7c8iqgExSTI7bgHP78D7zOpvY,8770
+ camel/agents/role_assignment_agent.py,sha256=IWfu5b2RW1gYziffskErhdmybJOusVvhb9gqLF9_5mw,4800
+ camel/agents/search_agent.py,sha256=TMyV2LoBVB0hMnSex6q7xbyLRGsF_EMKxCZ8xbZBX9o,4404
+ camel/agents/task_agent.py,sha256=aNpn8bYoe2VlVSlWfbV6ynI5zG9pXq6V5NcppqJGVlU,14253
  camel/agents/tool_agents/__init__.py,sha256=ulTNWU2qoFGe3pvVmCq_sdfeSX3NKZ0due66TYvsL-M,862
  camel/agents/tool_agents/base.py,sha256=nQAhfWi8a_bCgzlf5-G-tmj1fKm6AjpRc89NQkWwpnc,1399
  camel/agents/tool_agents/hugging_face_tool_agent.py,sha256=1Z5tG6f_86eL0vmtRZ-BJvoLDFFLhoHt8JtDvgat1xU,8723
@@ -19,7 +19,7 @@ camel/configs/__init__.py,sha256=9yc5rMGH_FHYCRyC89vmtozNi7RGhN8XHLXxtnIzRh4,117
  camel/configs/anthropic_config.py,sha256=zD7VMFUw4s7wmBlr64oSXxpEUkhp7wj9mvAd0WK2zFc,3308
  camel/configs/base_config.py,sha256=CEF8ryl_dkH6LgOhwuP5_EgjaWCUCB-E3GcMWR-2YFE,870
  camel/configs/litellm_config.py,sha256=6nghqAD1G7nsvW6W56LHpSKEnJyRiCLEcLgtzpPr-ac,5542
- camel/configs/openai_config.py,sha256=UKTnpzRcAsQtxgvPJNgLHoMGuXWOrQyJsz0MBNsjALM,6603
+ camel/configs/openai_config.py,sha256=tFEiPDQ8Cdvkfds83T7_5osNikwA3NuRGbpjV0wq4Ao,7593
  camel/embeddings/__init__.py,sha256=9TI7392iYxrlYYerPoboDBOFvpEmP_nSSgtEjks1vJQ,1034
  camel/embeddings/base.py,sha256=nauXLNEJlPnk2sKigFzvNTk_RKsC_2l_EQiyPyj_ATo,2208
  camel/embeddings/openai_embedding.py,sha256=nlBIlbTqps34OT-ydrA1CUYOOZMJqbNSsqyjCx-16wM,2885
@@ -28,7 +28,7 @@ camel/embeddings/vlm_embedding.py,sha256=VvD_b737snNrZTRE4ejFvWLjd_YT1DCTKl8yKIg
  camel/functions/__init__.py,sha256=3d1ZI3xx67DvHeBQhQQAu7IwTohC6Sa-_EPZeDE8_50,1737
  camel/functions/google_maps_function.py,sha256=AmhlIyqkrkZF6Vb4O-wdtEKTQjRh5mMjHjS56ciGgjk,12468
  camel/functions/math_functions.py,sha256=sPHSEOdHOmL38wZWcdyiBj0VEmf7mhQ0MBzya1SFNL0,1703
- camel/functions/open_api_function.py,sha256=W_xm4ZFRNWwjQVX-SyWfznRZnvq_T3F7ltw3qJ2Epx0,20312
+ camel/functions/open_api_function.py,sha256=QW0zTIGxXT1h-JWIK1iAKsqvDXXX5FrIwHBJg8i6N4g,20518
  camel/functions/open_api_specs/biztoc/__init__.py,sha256=f3LXNDzN2XWWoF2D0nesG8VuEA6Zd14i2aiTDbCm5bA,708
  camel/functions/open_api_specs/biztoc/ai-plugin.json,sha256=IJinQbLv5MFPGFwdN7PbOhwArFVExSEZdJspe-mOBIo,866
  camel/functions/open_api_specs/biztoc/openapi.yaml,sha256=SQ2bYIWb1nVBtkBeFaOihiWQ71oZ2bzz0fCgu6igM8A,610
@@ -57,7 +57,7 @@ camel/functions/open_api_specs/web_scraper/paths/scraper.py,sha256=SQGbFkshLN4xm
  camel/functions/openai_function.py,sha256=NyN8LBKdNeWizR7SnOp6VwEQhq29OJgskFfXq8EzIFg,14948
  camel/functions/retrieval_functions.py,sha256=ZBwQhBeun86k6AnMDCpf0U-JYNaU0alDJAS1hdnumAQ,2281
  camel/functions/search_functions.py,sha256=tj2eM7Jc70a5tsZhKr3mWczUbK9saUAJxXTrlWKBAv8,11151
- camel/functions/slack_functions.py,sha256=wY838LhFRCEFbhm-R6ImJz7qNT9a74pKaK3oVv0j2ts,8808
+ camel/functions/slack_functions.py,sha256=nQ9qzaswzQ1wEFVapWzgZ2-zF2oVXNMJQ50zxBPaz_g,9100
  camel/functions/twitter_function.py,sha256=xL-GKU69WrcTUm3lQl1yPgJFxtBJKRmWN3zx9AfGNKI,17254
  camel/functions/weather_functions.py,sha256=W2jMFqxq5M7Dan7cgEpnvzBk0SV_MduMbCuvHBsgo-c,5881
  camel/generators.py,sha256=tcYDoHwSKN0rBiu7u4rWN9pb61O8OaclrNaasCqHSJM,10437
@@ -82,28 +82,31 @@ camel/memories/records.py,sha256=zmZsYHVuq6fYqJDkzhNXF02uWLzdBemaEZeG0Ls90pU,361
  camel/messages/__init__.py,sha256=djLvpz6AmjeLzuUSQl7J6T2O4x8MwSdcH0l9fbj_3yg,1468
  camel/messages/base.py,sha256=1cyYITXxBsp2UCdOjF1Ky4W_PgRegEfitqbrF9PjUPs,13721
  camel/messages/func_message.py,sha256=CCVkbz-2pdxXV0vBETI0xt7d7uiN8zACpRI7lCnfTFQ,3841
- camel/models/__init__.py,sha256=AHlcSSVnw3NK7ngqVoxPFg-m5Ts3goyshqtwNJIsQ2U,1288
- camel/models/anthropic_model.py,sha256=N0oM9uRn3lV-UxSlhBmM08p0ZsQTjPWaEYI24XzyAPE,5286
- camel/models/base_model.py,sha256=_TPcJrH4hbX6Iq6SBRUJufBp_Mgzkye1terqstu7aIQ,3906
+ camel/models/__init__.py,sha256=RfAHcSSaBUAilObQIU07uICxaujjeO5EwLv4Pg0vrPc,1408
+ camel/models/anthropic_model.py,sha256=Gaf3xVcUa3OqFHWOyi3qhiG9LIbqozhdtmJNHDGomQY,5412
+ camel/models/base_model.py,sha256=TMbS44Fn-6m0OlrxYCtvwKqGUM_4Jz2y6kX-P28nOeI,4030
  camel/models/litellm_model.py,sha256=_f61yTPFNMrdAAKuDokLoohjuhECcwjdOjLmDoxgNJk,3942
- camel/models/model_factory.py,sha256=sM67tlxriaZjn5xcIR-yK12ubk4EBMi_kRdl36o8eU4,2664
- camel/models/open_source_model.py,sha256=3t3Lrdx9T-g94k3MrRRn7dDQqJHqFoFRBi-yXj5RIEw,5857
+ camel/models/model_factory.py,sha256=786bR7cCdqK4V5i4wUJ20PC6cKPyX9AQN7zE8PBeW0s,3878
+ camel/models/nemotron_model.py,sha256=qaRbqEPFYR2Chp4SqdiNOOZvPjFfbLKZiQdGMEg74bc,2443
+ camel/models/ollama_model.py,sha256=Z65KZ4R_GPV3ezAORzNQIZmu28fCF13Lb01nGwQYuMk,4488
+ camel/models/open_source_model.py,sha256=r8TGq-9xAwOANZ5s_y3fJUGAvS0zDg03RmbZ8X2ga-E,6156
  camel/models/openai_audio_models.py,sha256=Jpd_B-5R8d8s3qYo_0x-Ahw5icMCysGEjaDtssV34dg,9799
- camel/models/openai_model.py,sha256=cqbfcZZEDOBUqJRT5oTo_ohNMxAUXXOg-dm7kTiuilE,4193
- camel/models/stub_model.py,sha256=hbfS6A83vY4TaFK_7Q9jA4Gbrkp0Hlmq2IEZ1z81L8E,3631
+ camel/models/openai_model.py,sha256=-pW1dtDkP1WemGIzgxkYeEZ6kzGpgsFsGuF069HIeIo,4356
+ camel/models/stub_model.py,sha256=kyFXy9WyHgjnXDFO8Sag4q023lHGu4D0vyzfkGTSi9w,3704
  camel/models/zhipuai_model.py,sha256=DMpmwn6eCXwof1ASvih3NTfGxTlt5YstGEC6qrQqRJE,4484
- camel/prompts/__init__.py,sha256=xzt5NBo2tZ_35SvzUk6OExKzrSEKZJMZeYszOSPW2ec,1903
+ camel/prompts/__init__.py,sha256=tvN1pz132rgjV_C4MoVrSMTqgtOP0SzdfzAPX8rjpaQ,2049
  camel/prompts/ai_society.py,sha256=ApgvIED1Z_mdsWDNc2_u35Ktp7pEKksMrOIQKo_q5cI,6306
  camel/prompts/base.py,sha256=VMde6w97zHPP03OA628wGwXhtJweoccOK1B1f3aESDo,8464
  camel/prompts/code.py,sha256=vrv2mPjlakPlqVLQt_rA1veP79EN1t3iM41bkACrc9I,5865
  camel/prompts/descripte_video_prompt.py,sha256=lyJlVN1wjrY77Cv8U3NdDpmtlyIXs1wHYRhsrgLPcuY,1295
  camel/prompts/evaluation.py,sha256=4zm5ZVy3CSb2NdFWnS43ejK8Cu_pU8iUIj06ofpuZpg,1596
+ camel/prompts/generate_text_embedding_data.py,sha256=S0D0S99OAixDh_jp3sfFbeRJjffLutmyUd-vryqV7ho,4246
  camel/prompts/misalignment.py,sha256=aL3W5WvTJBfF-1vWQse_tn3zAOaezHGU510HLs0AlQo,4537
  camel/prompts/object_recognition.py,sha256=L_YM_c8AxwO6MvwuUdeuluwhBPXedNxNIzOv5yF9Dag,1422
  camel/prompts/prompt_templates.py,sha256=PeOp_eUgyZyJ7BCwA2cvSx8O3QPu9ftjgaZ6Al8zlJQ,4134
  camel/prompts/role_description_prompt_template.py,sha256=k9p3NlxY1MWKzhoRpeQeuz0oHDQYo63WoPdWcUmHr_A,2544
  camel/prompts/solution_extraction.py,sha256=5vTSaeQoBSvaur3cKgqQ9kLxSA5QIOBI4OPQzXWbQFg,2109
- camel/prompts/task_prompt_template.py,sha256=vvWwuWkQ7SX9PR_2DtSr437JF5G-JPaanhWE0_GBug0,2816
+ camel/prompts/task_prompt_template.py,sha256=2Z4kMoT1IiMqcXKIEl8Tro-5pbQJqDmLaXi8hqkxPOk,3040
  camel/prompts/translation.py,sha256=V_40Ko2is5dAOCZ8DzsHo6DO7l8_jnEV9KjCKH7GxtY,1902
  camel/responses/__init__.py,sha256=edtTQskOgq5obyITziRFL62HTJP9sAikAtP9vrFacEQ,795
  camel/responses/agent_responses.py,sha256=UsTZHi4jPs2wfChPQWttVNyHneoGdQtdrRouatywE4w,1714
@@ -115,7 +118,7 @@ camel/retrievers/cohere_rerank_retriever.py,sha256=jDAo5OxAWZBQhgH9cSo0_3h057AN4
  camel/retrievers/vector_retriever.py,sha256=PhPIUyjffOojwYiATEY1lsCQO9yDmpc8k-R4sAt5IvY,7316
  camel/societies/__init__.py,sha256=JhGwUHjht4CewzC3shKuxmgB3oS7FIxIxmiKyhNsfIs,832
  camel/societies/babyagi_playing.py,sha256=0sDe65XbGGWQOe4I758sH-sAk1Hf82Y_qawjaEbbBXE,11791
- camel/societies/role_playing.py,sha256=WIV_eGXw8pzGsOGnqhTsQqOWQxwe-Snwd_t162AALxI,22100
+ camel/societies/role_playing.py,sha256=C5hQIPH8gwP7_dkh65nOPplsw50lYQiYXk-aapODqMY,21983
  camel/storages/__init__.py,sha256=crRaZKmgvs8RCzfffREYIVo09J4q_HVu44JGb4CJMfo,1480
  camel/storages/graph_storages/__init__.py,sha256=vsJZkedaCS-cLQ-KgMqio8cxXvbousBWVqzZJvlimT8,897
  camel/storages/graph_storages/base.py,sha256=-Ys1BIuz4H5FvYMZTBIjg8Cfv40CPQ-OsovwMzygEgU,2858
@@ -136,13 +139,13 @@ camel/terminators/token_limit_terminator.py,sha256=mK30wVUnoqNAvIo-wxkqY5gUSNay2
  camel/toolkits/__init__.py,sha256=2-z9eGt53U6_1uwMtiu0-GgU7k5iFkS3HEMXuB3Qs2A,836
  camel/toolkits/base.py,sha256=znjnZtgxA5gbT7OMnrKQF_a9FK3A7Xk5s_lP94u76vI,923
  camel/toolkits/github_toolkit.py,sha256=ZBShnR7Vc3xQhQiTHsiF2esyBn7JEJxldvlgg-Cnsgk,11631
- camel/types/__init__.py,sha256=D8ITJYnJF6PvJiWQB35MIBbPbX5AzYB_hpqbkprXSvY,1927
- camel/types/enums.py,sha256=DpGMfhVTu5bYnI8ts52-Rl5FRIGRybADx_zSl702afY,9012
+ camel/types/__init__.py,sha256=ArKXATj3z_Vv4ISmROVeo6Mv3tj5kE1dTkqfgwyxVY4,1975
+ camel/types/enums.py,sha256=qcpDy5pnI_-5LoQyWtRQeVID5X2bSCFQv7NFUjFY1lE,10909
  camel/types/openai_types.py,sha256=BNQ6iCzKTjSvgcXFsAFIgrUS_YUFZBU6bDoyAp387hI,2045
  camel/utils/__init__.py,sha256=qFRBTiHE_QXYDSQoNecobtJYSFuNHEULx7nr00Q1S6Y,1827
- camel/utils/commons.py,sha256=hFEAhmyYwYPVZVYFBbmGepEC_wXfHm5umkiMUHOuckw,11888
+ camel/utils/commons.py,sha256=1p50ci5bnX1sQdJG_bewGbG6WKwR220YrCr3yoJmtu0,12110
  camel/utils/constants.py,sha256=ZIw5ILfOyJFyjEAYrbJMANeg1_EZI-zMK_xVrkwALbM,1105
- camel/utils/token_counting.py,sha256=lFLghaWeD3HdHHXnebYdkAs7leIQQjmmlsbXNdjaZTo,14334
- camel_ai-0.1.5.2.dist-info/METADATA,sha256=trDmJAGhrOEzgT3uHycAym4g_n_XduB-M-9O9EHNWSw,22016
- camel_ai-0.1.5.2.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- camel_ai-0.1.5.2.dist-info/RECORD,,
+ camel/utils/token_counting.py,sha256=_eX314B0ikffz76bUwnN7xbLGnHJ3rezvesxwPNwfQ0,14410
+ camel_ai-0.1.5.3.dist-info/METADATA,sha256=dQemzv34rEPO-VZ7CEswERffe8-Ja_MZgZ0rdWk9G0k,21608
+ camel_ai-0.1.5.3.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ camel_ai-0.1.5.3.dist-info/RECORD,,