camel-ai 0.2.17__py3-none-any.whl → 0.2.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


camel/messages/func_message.py CHANGED
@@ -154,7 +154,7 @@ class FunctionCallingMessage(BaseMessage):
                 " due to missing function name."
             )
 
-        result_content = json.dumps(self.result)
+        result_content = str(self.result)
 
         return {
             "role": "tool",
camel/models/deepseek_model.py CHANGED
@@ -18,6 +18,7 @@ from typing import Any, Dict, List, Optional, Union
 from openai import OpenAI, Stream
 
 from camel.configs import DEEPSEEK_API_PARAMS, DeepSeekConfig
+from camel.logger import get_logger
 from camel.messages import OpenAIMessage
 from camel.models.base_model import BaseModelBackend
 from camel.types import (
@@ -27,6 +28,8 @@ from camel.types import (
 )
 from camel.utils import BaseTokenCounter, OpenAITokenCounter, api_keys_required
 
+logger = get_logger(__name__)
+
 
 class DeepSeekModel(BaseModelBackend):
     r"""DeepSeek API in a unified BaseModelBackend interface.
@@ -110,11 +113,93 @@ class DeepSeekModel(BaseModelBackend):
                 `ChatCompletion` in the non-stream mode, or
                 `Stream[ChatCompletionChunk]` in the stream mode.
         """
+        # deepseek reasoner has limitations
+        # reference: https://api-docs.deepseek.com/guides/reasoning_model#api-parameters
+        if self.model_type in [
+            ModelType.DEEPSEEK_REASONER,
+        ]:
+            import re
+
+            logger.warning(
+                "You are using a DeepSeek Reasoner model, "
+                "which has certain limitations, reference: "
+                "`https://api-docs.deepseek.com/guides/reasoning_model#api-parameters`"
+            )
+
+            # Check and remove unsupported parameters and reset the fixed
+            # parameters
+            unsupported_keys = [
+                "temperature",
+                "top_p",
+                "presence_penalty",
+                "frequency_penalty",
+                "logprobs",
+                "top_logprobs",
+                "tools",
+            ]
+            for key in unsupported_keys:
+                if key in self.model_config_dict:
+                    del self.model_config_dict[key]
+
+            # Remove thinking content from messages before sending to API
+            # This ensures only the final response is sent, excluding
+            # intermediate thought processes
+            messages = [
+                {  # type: ignore[misc]
+                    **msg,
+                    'content': re.sub(
+                        r'<think>.*?</think>',
+                        '',
+                        msg['content'],  # type: ignore[arg-type]
+                        flags=re.DOTALL,
+                    ).strip(),
+                }
+                for msg in messages
+            ]
+
         response = self._client.chat.completions.create(
             messages=messages,
             model=self.model_type,
             **self.model_config_dict,
         )
+
+        # Handle reasoning content with <think> tags at the beginning
+        if (
+            self.model_type
+            in [
+                ModelType.DEEPSEEK_REASONER,
+            ]
+            and os.environ.get("GET_REASONING_CONTENT", "false").lower()
+            == "true"
+        ):
+            reasoning_content = response.choices[0].message.reasoning_content
+            combined_content = (
+                f"<think>\n{reasoning_content}\n</think>\n"
+                if reasoning_content
+                else ""
+            ) + response.choices[0].message.content
+
+            response = ChatCompletion.construct(
+                id=response.id,
+                choices=[
+                    dict(
+                        index=response.choices[0].index,
+                        message={
+                            "role": response.choices[0].message.role,
+                            "content": combined_content,
+                            "tool_calls": None,
+                        },
+                        finish_reason=response.choices[0].finish_reason
+                        if response.choices[0].finish_reason
+                        else None,
+                    )
+                ],
+                created=response.created,
+                model=response.model,
+                object="chat.completion",
+                usage=response.usage,
+            )
+
         return response
 
     def check_model_config(self):
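
The new DeepSeek reasoner path can be exercised roughly as follows. This is a sketch rather than code from the diff: `ModelFactory` and `ModelPlatformType.DEEPSEEK` (plus the usual `DEEPSEEK_API_KEY` setup) are assumed from the rest of the CAMEL API; only `ModelType.DEEPSEEK_REASONER` and the `GET_REASONING_CONTENT` environment variable come from the changes above.

    import os

    from camel.models import ModelFactory
    from camel.types import ModelPlatformType, ModelType

    # Opt in to having the <think>...</think> reasoning block prepended
    # to the reply (read via GET_REASONING_CONTENT in the hunk above).
    os.environ["GET_REASONING_CONTENT"] = "true"

    model = ModelFactory.create(
        model_platform=ModelPlatformType.DEEPSEEK,  # assumed platform enum
        model_type=ModelType.DEEPSEEK_REASONER,
        # Unsupported sampling parameters (temperature, top_p, penalties,
        # logprobs, tools) are stripped before the API call, per the diff.
    )
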
camel/models/groq_model.py CHANGED
@@ -88,8 +88,6 @@ class GroqModel(BaseModelBackend):
             BaseTokenCounter: The token counter following the model's
                 tokenization style.
         """
-        # Make sure you have the access to these open-source model in
-        # HuggingFace
         if not self._token_counter:
             self._token_counter = OpenAITokenCounter(ModelType.GPT_4O_MINI)
         return self._token_counter
camel/models/openai_model.py CHANGED
@@ -21,7 +21,6 @@ from camel.configs import OPENAI_API_PARAMS, ChatGPTConfig
 from camel.messages import OpenAIMessage
 from camel.models import BaseModelBackend
 from camel.types import (
-    NOT_GIVEN,
     ChatCompletion,
     ChatCompletionChunk,
     ModelType,
@@ -112,6 +111,7 @@ class OpenAIModel(BaseModelBackend):
             ModelType.O1,
             ModelType.O1_MINI,
             ModelType.O1_PREVIEW,
+            ModelType.O3_MINI,
         ]:
             warnings.warn(
                 "Warning: You are using an O1 model (O1_MINI or O1_PREVIEW), "
@@ -148,14 +148,6 @@ class OpenAIModel(BaseModelBackend):
 
             return self._to_chat_completion(response)
 
-        # Removing 'strict': True from the dictionary for
-        # client.chat.completions.create
-        if self.model_config_dict.get('tools') is not NOT_GIVEN:
-            for tool in self.model_config_dict.get('tools', []):
-                function_dict = tool.get('function', {})
-                if 'strict' in function_dict:
-                    del function_dict['strict']
-
         response = self._client.chat.completions.create(
             messages=messages,
             model=self.model_type,
camel/toolkits/search_toolkit.py CHANGED
@@ -13,10 +13,9 @@
 # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
 import os
 import xml.etree.ElementTree as ET
-from typing import Any, Dict, List, Literal, Optional, Type, TypeAlias, Union
+from typing import Any, Dict, List, Literal, Optional, TypeAlias, Union
 
 import requests
-from pydantic import BaseModel
 
 from camel.toolkits.base import BaseToolkit
 from camel.toolkits.function_tool import FunctionTool
@@ -77,7 +76,7 @@ class SearchToolkit(BaseToolkit):
         output_type: Literal[
             "searchResults", "sourcedAnswer", "structured"
         ] = "searchResults",
-        structured_output_schema: Union[Type[BaseModel], str, None] = None,
+        structured_output_schema: Optional[str] = None,
     ) -> Dict[str, Any]:
        r"""Search for a query in the Linkup API and return results in various
        formats.
@@ -92,9 +91,9 @@
                 - "searchResults" for raw search results,
                 - "sourcedAnswer" for an answer with supporting sources,
                 - "structured" for output based on a provided schema.
-            structured_output_schema (Union[Type[BaseModel], str, None]): If
-                `output_type` is "structured",specify the schema of the
-                output. Can be a Pydantic BaseModel or a JSON schema string.
+            structured_output_schema (Optional[str]): If `output_type` is
+                "structured", specify the schema of the output. Must be a
+                string representing a valid object JSON schema.
 
         Returns:
             Dict[str, Any]: A dictionary representing the search result. The
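
For reference, a hypothetical call using the narrowed parameter type. The method name `search_linkup` and the `query` argument are assumptions based on the docstring above; only `output_type` and `structured_output_schema` appear in the diff itself.

    from camel.toolkits import SearchToolkit

    # JSON-schema string instead of a Pydantic BaseModel (the old Union type).
    schema = '{"type": "object", "properties": {"population": {"type": "integer"}}}'

    result = SearchToolkit().search_linkup(  # hypothetical method name
        query="Current population of Paris",
        output_type="structured",
        structured_output_schema=schema,
    )
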
camel/types/enums.py CHANGED
@@ -37,6 +37,7 @@ class ModelType(UnifiedModelType, Enum):
     O1 = "o1"
     O1_PREVIEW = "o1-preview"
     O1_MINI = "o1-mini"
+    O3_MINI = "o3-mini"
 
     GLM_4 = "glm-4"
     GLM_4V = 'glm-4v'
@@ -44,16 +45,37 @@ class ModelType(UnifiedModelType, Enum):
 
     # Groq platform models
     GROQ_LLAMA_3_1_8B = "llama-3.1-8b-instant"
-    GROQ_LLAMA_3_1_70B = "llama-3.1-70b-versatile"
-    GROQ_LLAMA_3_1_405B = "llama-3.1-405b-reasoning"
     GROQ_LLAMA_3_3_70B = "llama-3.3-70b-versatile"
     GROQ_LLAMA_3_3_70B_PREVIEW = "llama-3.3-70b-specdec"
     GROQ_LLAMA_3_8B = "llama3-8b-8192"
     GROQ_LLAMA_3_70B = "llama3-70b-8192"
     GROQ_MIXTRAL_8_7B = "mixtral-8x7b-32768"
-    GROQ_GEMMA_7B_IT = "gemma-7b-it"
     GROQ_GEMMA_2_9B_IT = "gemma2-9b-it"
 
+    # TogetherAI platform models support tool calling
+    TOGETHER_LLAMA_3_1_8B = "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
+    TOGETHER_LLAMA_3_1_70B = "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo"
+    TOGETHER_LLAMA_3_1_405B = "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo"
+    TOGETHER_LLAMA_3_3_70B = "meta-llama/Llama-3.3-70B-Instruct-Turbo"
+    TOGETHER_MIXTRAL_8_7B = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+    TOGETHER_MISTRAL_7B = "mistralai/Mistral-7B-Instruct-v0.1"
+
+    # SambaNova Cloud platform models support tool calling
+    SAMBA_LLAMA_3_1_8B = "Meta-Llama-3.1-8B-Instruct"
+    SAMBA_LLAMA_3_1_70B = "Meta-Llama-3.1-70B-Instruct"
+    SAMBA_LLAMA_3_1_405B = "Meta-Llama-3.1-405B-Instruct"
+
+    # SGLang models support tool calling
+    SGLANG_LLAMA_3_1_8B = "meta-llama/Meta-Llama-3.1-8B-Instruct"
+    SGLANG_LLAMA_3_1_70B = "meta-llama/Meta-Llama-3.1-70B-Instruct"
+    SGLANG_LLAMA_3_1_405B = "meta-llama/Meta-Llama-3.1-405B-Instruct"
+    SGLANG_LLAMA_3_2_1B = "meta-llama/Llama-3.2-1B-Instruct"
+    SGLANG_MIXTRAL_NEMO = "mistralai/Mistral-Nemo-Instruct-2407"
+    SGLANG_MISTRAL_7B = "mistralai/Mistral-7B-Instruct-v0.3"
+    SGLANG_QWEN_2_5_7B = "Qwen/Qwen2.5-7B-Instruct"
+    SGLANG_QWEN_2_5_32B = "Qwen/Qwen2.5-32B-Instruct"
+    SGLANG_QWEN_2_5_72B = "Qwen/Qwen2.5-72B-Instruct"
+
     STUB = "stub"
 
     # Legacy anthropic models
@@ -141,7 +163,7 @@ class ModelType(UnifiedModelType, Enum):
 
     # DeepSeek models
     DEEPSEEK_CHAT = "deepseek-chat"
-
+    DEEPSEEK_REASONER = "deepseek-reasoner"
     # InternLM models
     INTERNLM3_LATEST = "internlm3-latest"
     INTERNLM3_8B_INSTRUCT = "internlm3-8b-instruct"
@@ -175,6 +197,10 @@ class ModelType(UnifiedModelType, Enum):
                 self.is_deepseek,
                 self.is_cohere,
                 self.is_internlm,
+                self.is_together,
+                self.is_sambanova,
+                self.is_groq,
+                self.is_sglang,
             ]
         )
 
@@ -190,6 +216,7 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.O1,
             ModelType.O1_PREVIEW,
             ModelType.O1_MINI,
+            ModelType.O3_MINI,
         }
 
     @property
@@ -237,17 +264,35 @@ class ModelType(UnifiedModelType, Enum):
         r"""Returns whether this type of models is served by Groq."""
         return self in {
             ModelType.GROQ_LLAMA_3_1_8B,
-            ModelType.GROQ_LLAMA_3_1_70B,
-            ModelType.GROQ_LLAMA_3_1_405B,
             ModelType.GROQ_LLAMA_3_3_70B,
             ModelType.GROQ_LLAMA_3_3_70B_PREVIEW,
             ModelType.GROQ_LLAMA_3_8B,
             ModelType.GROQ_LLAMA_3_70B,
             ModelType.GROQ_MIXTRAL_8_7B,
-            ModelType.GROQ_GEMMA_7B_IT,
             ModelType.GROQ_GEMMA_2_9B_IT,
         }
 
+    @property
+    def is_together(self) -> bool:
+        r"""Returns whether this type of models is served by Together AI."""
+        return self in {
+            ModelType.TOGETHER_LLAMA_3_1_405B,
+            ModelType.TOGETHER_LLAMA_3_1_70B,
+            ModelType.TOGETHER_LLAMA_3_3_70B,
+            ModelType.TOGETHER_LLAMA_3_3_70B,
+            ModelType.TOGETHER_MISTRAL_7B,
+            ModelType.TOGETHER_MIXTRAL_8_7B,
+        }
+
+    @property
+    def is_sambanova(self) -> bool:
+        r"""Returns whether this type of models is served by SambaNova AI."""
+        return self in {
+            ModelType.SAMBA_LLAMA_3_1_8B,
+            ModelType.SAMBA_LLAMA_3_1_70B,
+            ModelType.SAMBA_LLAMA_3_1_405B,
+        }
+
     @property
     def is_mistral(self) -> bool:
         r"""Returns whether this type of models is served by Mistral."""
@@ -365,6 +410,7 @@ class ModelType(UnifiedModelType, Enum):
     def is_deepseek(self) -> bool:
         return self in {
             ModelType.DEEPSEEK_CHAT,
+            ModelType.DEEPSEEK_REASONER,
         }
 
     @property
@@ -376,6 +422,20 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.INTERNLM2_PRO_CHAT,
         }
 
+    @property
+    def is_sglang(self) -> bool:
+        return self in {
+            ModelType.SGLANG_LLAMA_3_1_8B,
+            ModelType.SGLANG_LLAMA_3_1_70B,
+            ModelType.SGLANG_LLAMA_3_1_405B,
+            ModelType.SGLANG_LLAMA_3_2_1B,
+            ModelType.SGLANG_MIXTRAL_NEMO,
+            ModelType.SGLANG_MISTRAL_7B,
+            ModelType.SGLANG_QWEN_2_5_7B,
+            ModelType.SGLANG_QWEN_2_5_32B,
+            ModelType.SGLANG_QWEN_2_5_72B,
+        }
+
     @property
     def token_limit(self) -> int:
         r"""Returns the maximum token limit for a given model.
@@ -403,12 +463,12 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.GROQ_LLAMA_3_8B,
             ModelType.GROQ_LLAMA_3_70B,
             ModelType.GROQ_LLAMA_3_3_70B_PREVIEW,
-            ModelType.GROQ_GEMMA_7B_IT,
             ModelType.GROQ_GEMMA_2_9B_IT,
             ModelType.GLM_3_TURBO,
             ModelType.GLM_4,
             ModelType.QWEN_VL_PLUS,
             ModelType.NVIDIA_LLAMA3_70B,
+            ModelType.TOGETHER_MISTRAL_7B,
         }:
             return 8_192
         elif self in {
@@ -419,6 +479,8 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.YI_VISION,
             ModelType.YI_SPARK,
             ModelType.YI_LARGE_RAG,
+            ModelType.SAMBA_LLAMA_3_1_8B,
+            ModelType.SAMBA_LLAMA_3_1_405B,
         }:
             return 16_384
         elif self in {
@@ -438,11 +500,14 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.INTERNLM3_LATEST,
             ModelType.INTERNLM2_5_LATEST,
             ModelType.INTERNLM2_PRO_CHAT,
+            ModelType.TOGETHER_MIXTRAL_8_7B,
+            ModelType.SGLANG_MISTRAL_7B,
         }:
             return 32_768
         elif self in {
             ModelType.MISTRAL_MIXTRAL_8x22B,
             ModelType.DEEPSEEK_CHAT,
+            ModelType.DEEPSEEK_REASONER,
         }:
             return 64_000
         elif self in {
@@ -475,19 +540,31 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.NVIDIA_LLAMA3_2_3B_INSTRUCT,
             ModelType.NVIDIA_LLAMA3_3_70B_INSTRUCT,
             ModelType.GROQ_LLAMA_3_3_70B,
+            ModelType.SAMBA_LLAMA_3_1_70B,
+            ModelType.SGLANG_LLAMA_3_1_8B,
+            ModelType.SGLANG_LLAMA_3_1_70B,
+            ModelType.SGLANG_LLAMA_3_1_405B,
+            ModelType.SGLANG_LLAMA_3_2_1B,
+            ModelType.SGLANG_MIXTRAL_NEMO,
         }:
             return 128_000
         elif self in {
             ModelType.GROQ_LLAMA_3_1_8B,
-            ModelType.GROQ_LLAMA_3_1_70B,
-            ModelType.GROQ_LLAMA_3_1_405B,
             ModelType.QWEN_PLUS,
             ModelType.QWEN_TURBO,
             ModelType.QWEN_CODER_TURBO,
+            ModelType.TOGETHER_LLAMA_3_1_8B,
+            ModelType.TOGETHER_LLAMA_3_1_70B,
+            ModelType.TOGETHER_LLAMA_3_1_405B,
+            ModelType.TOGETHER_LLAMA_3_3_70B,
+            ModelType.SGLANG_QWEN_2_5_7B,
+            ModelType.SGLANG_QWEN_2_5_32B,
+            ModelType.SGLANG_QWEN_2_5_72B,
         }:
             return 131_072
         elif self in {
             ModelType.O1,
+            ModelType.O3_MINI,
             ModelType.CLAUDE_2_1,
             ModelType.CLAUDE_3_OPUS,
             ModelType.CLAUDE_3_SONNET,
@@ -520,6 +597,11 @@ class EmbeddingModelType(Enum):
     TEXT_EMBEDDING_3_SMALL = "text-embedding-3-small"
     TEXT_EMBEDDING_3_LARGE = "text-embedding-3-large"
 
+    JINA_EMBEDDINGS_V3 = "jina-embeddings-v3"
+    JINA_CLIP_V2 = "jina-clip-v2"
+    JINA_COLBERT_V2 = "jina-colbert-v2"
+    JINA_EMBEDDINGS_V2_BASE_CODE = "jina-embeddings-v2-base-code"
+
     MISTRAL_EMBED = "mistral-embed"
 
     @property
@@ -531,6 +613,16 @@ class EmbeddingModelType(Enum):
             EmbeddingModelType.TEXT_EMBEDDING_3_LARGE,
         }
 
+    @property
+    def is_jina(self) -> bool:
+        r"""Returns whether this type of models is an Jina model."""
+        return self in {
+            EmbeddingModelType.JINA_EMBEDDINGS_V3,
+            EmbeddingModelType.JINA_CLIP_V2,
+            EmbeddingModelType.JINA_COLBERT_V2,
+            EmbeddingModelType.JINA_EMBEDDINGS_V2_BASE_CODE,
+        }
+
     @property
     def is_mistral(self) -> bool:
         r"""Returns whether this type of models is an Mistral-released
@@ -542,7 +634,20 @@ class EmbeddingModelType(Enum):
 
     @property
     def output_dim(self) -> int:
-        if self is EmbeddingModelType.TEXT_EMBEDDING_ADA_2:
+        if self in {
+            EmbeddingModelType.JINA_COLBERT_V2,
+        }:
+            return 128
+        elif self in {
+            EmbeddingModelType.JINA_EMBEDDINGS_V2_BASE_CODE,
+        }:
+            return 768
+        elif self in {
+            EmbeddingModelType.JINA_EMBEDDINGS_V3,
+            EmbeddingModelType.JINA_CLIP_V2,
+        }:
+            return 1024
+        elif self is EmbeddingModelType.TEXT_EMBEDDING_ADA_2:
             return 1536
         elif self is EmbeddingModelType.TEXT_EMBEDDING_3_SMALL:
             return 1536
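
A small sanity check of the new enum members, using only values that appear in the hunks above (the `token_limit`, `is_deepseek`, `is_jina`, and `output_dim` properties are all defined in this file):

    from camel.types import EmbeddingModelType, ModelType

    assert ModelType.DEEPSEEK_REASONER.is_deepseek
    assert ModelType.DEEPSEEK_REASONER.token_limit == 64_000
    assert ModelType.TOGETHER_MISTRAL_7B.token_limit == 8_192
    assert ModelType.SGLANG_QWEN_2_5_72B.token_limit == 131_072
    assert EmbeddingModelType.JINA_EMBEDDINGS_V3.is_jina
    assert EmbeddingModelType.JINA_EMBEDDINGS_V3.output_dim == 1024
    assert EmbeddingModelType.JINA_COLBERT_V2.output_dim == 128
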
camel/utils/token_counting.py CHANGED
@@ -112,7 +112,7 @@ class OpenAITokenCounter(BaseTokenCounter):
         elif ("gpt-3.5-turbo" in self.model) or ("gpt-4" in self.model):
             self.tokens_per_message = 3
             self.tokens_per_name = 1
-        elif "o1" in self.model:
+        elif ("o1" in self.model) or ("o3" in self.model):
             self.tokens_per_message = 2
             self.tokens_per_name = 1
         else:
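
A short sketch of what the widened branch affects. The `count_tokens_from_messages` name is assumed from CAMEL's `BaseTokenCounter` interface; the `OpenAITokenCounter(ModelType...)` constructor appears in the Groq hunk above.

    from camel.types import ModelType
    from camel.utils import OpenAITokenCounter

    counter = OpenAITokenCounter(ModelType.O3_MINI)
    # o1/o3 model names now take the tokens_per_message = 2 branch.
    n = counter.count_tokens_from_messages(
        [{"role": "user", "content": "Hello, world!"}]
    )
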
camel_ai-0.2.17.dist-info/METADATA → camel_ai-0.2.19.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: camel-ai
-Version: 0.2.17
+Version: 0.2.19
 Summary: Communicative Agents for AI Society Study
 License: Apache-2.0
 Keywords: communicative-ai,ai-societies,artificial-intelligence,deep-learning,multi-agent-systems,cooperative-ai,natural-language-processing,large-language-models
@@ -71,7 +71,7 @@ Requires-Dist: neo4j (>=5.18.0,<6.0.0) ; extra == "rag" or extra == "storage" or
 Requires-Dist: newspaper3k (>=0.2.8,<0.3.0) ; extra == "web-tools" or extra == "all"
 Requires-Dist: notion-client (>=2.2.1,<3.0.0) ; extra == "communication-tools" or extra == "all"
 Requires-Dist: numpy (>=1,<2)
-Requires-Dist: openai (>=1.58.1,<2.0.0)
+Requires-Dist: openai (>=1.59.7,<2.0.0)
 Requires-Dist: openapi-spec-validator (>=0.7.1,<0.8.0) ; extra == "document-tools" or extra == "all"
 Requires-Dist: openbb (>=4.3.5,<5.0.0) ; extra == "data-tools" or extra == "all"
 Requires-Dist: opencv-python (>=4,<5) ; extra == "huggingface" or extra == "all"
@@ -418,6 +418,7 @@ Practical guides and tutorials for implementing specific functionalities in CAME
 | **[Dynamic Travel Planner Role-Playing: Multi-Agent System with Real-Time Insights Powered by Dappier](https://docs.camel-ai.org/cookbooks/applications/dynamic_travel_planner.html)** | Explore an innovative approach to travel planning, blending AI-driven role-playing and real-time data for seamless experiences. |
 | **[Customer Service Discord Bot with Agentic RAG](https://docs.camel-ai.org/cookbooks/applications/customer_service_Discord_bot_using_SambaNova_with_agentic_RAG.html)** | Learn how to build a robust customer service bot for Discord using Agentic RAG. |
 | **[Customer Service Discord Bot with Local Model](https://docs.camel-ai.org/cookbooks/applications/customer_service_Discord_bot_using_local_model_with_agentic_RAG.html)** | Learn how to build a robust customer service bot for Discord using Agentic RAG which supports local deployment. |
+| **[Customer Service Discord Bot for Finance with OpenBB](https://docs.camel-ai.org/cookbooks/applications/finance_discord_bot.html)**| Learn how to build a sipmle yet powerful financial data assistant Discord bot using OpenBB tools. |
 
 ### Data Processing
 | Cookbook | Description |
@@ -456,6 +457,8 @@ We implemented amazing research ideas from other works for you to build, compare
 
 - `Self-Instruct` from *Yizhong Wang et al.*: [SELF-INSTRUCT: Aligning Language Models with Self-Generated Instructions](https://arxiv.org/pdf/2212.10560). [[Example](https://github.com/camel-ai/camel/blob/master/examples/datagen/self_instruct/self_instruct.py)]
 
+- `Source2Synth` from *Alisia Lupidi et al.*: [Source2Synth: Synthetic Data Generation and Curation Grounded in Real Data Sources](https://arxiv.org/abs/2409.08239). [[Example](https://github.com/camel-ai/camel/blob/master/examples/datagen/source2synth.py)]
+
 ## Other Research Works Based on Camel
 - [Agent Trust](http://agent-trust.camel-ai.org/): Can Large Language Model Agents Simulate Human Trust Behavior?
 
camel_ai-0.2.17.dist-info/RECORD → camel_ai-0.2.19.dist-info/RECORD
@@ -1,13 +1,13 @@
-camel/__init__.py,sha256=GMXArXemoUFgm0mT_15RdGHxa2wiUJ4JTwFT5qN02Fk,912
+camel/__init__.py,sha256=DgGR3wSLBU3YxNfaiM5B0l2iOual_VnNMGeDb0_BDro,912
 camel/agents/__init__.py,sha256=LcS4m8s97-yADfznvcaAdUe9W0E9h3m6zrSc9H6m9so,1545
 camel/agents/base.py,sha256=c4bJYL3G3Z41SaFdMPMn8ZjLdFiFaVOFO6EQIfuCVR8,1124
-camel/agents/chat_agent.py,sha256=YmcG-zNhmsc-JvcAh2XNz6OTQVggBKIOYjqEGrMUjcA,56299
+camel/agents/chat_agent.py,sha256=KJM7FbcuOYcTF-PT9IYKtONjjpa-ZhsPFlrlCKf8zW0,56657
 camel/agents/critic_agent.py,sha256=qFVlHlQo0CVgmPWfWYLT8_oP_KyzCLFsQw_nN_vu5Bs,7487
 camel/agents/deductive_reasoner_agent.py,sha256=6BZGaq1hR6hKJuQtOfoYQnk_AkZpw_Mr7mUy2MspQgs,13540
 camel/agents/embodied_agent.py,sha256=XBxBu5ZMmSJ4B2U3Z7SMwvLlgp6yNpaBe8HNQmY9CZA,7536
 camel/agents/knowledge_graph_agent.py,sha256=MxG40XL8rs9sxcK8BKDLKurZz-Jui-oJOpYJaR5ZjMo,8962
-camel/agents/multi_hop_generator_agent.py,sha256=ivPaNNdzjfttc4XKnCBiz8lvGh6j7YUQPgnPtbVM5J4,3182
-camel/agents/programmed_agent_instruction.py,sha256=-cLnDSjeP5Fojct1AC9C3dTKsE49_ix7oFg9EgqQbxY,5418
+camel/agents/multi_hop_generator_agent.py,sha256=aRfvDv0GXCsP49An7F-9l87jh9osSxWD565MmGrKH78,4324
+camel/agents/programmed_agent_instruction.py,sha256=99fLe41che3X6wPpNPJXRwl4If6EoQqQVWIoT3DKE1s,7124
 camel/agents/role_assignment_agent.py,sha256=8bkTc14XToFHkP-ZOef5KP0P4hTlCDv0eNsDZPYuukA,5088
 camel/agents/search_agent.py,sha256=7sTt4FvFI3xeWgauBneDnwAguWy86Cu9oB2tscYqFc4,4616
 camel/agents/task_agent.py,sha256=KfJvZ5vOjjbrn20UGqSMnf6lds5ydfXdb7eNMxBU5vs,14654
@@ -34,10 +34,10 @@ camel/configs/__init__.py,sha256=4Q9kpo_hs7wKrpMMTCMXNtjMXdWsQ1GcUj2wB4dEODk,290
 camel/configs/anthropic_config.py,sha256=WIIyPYx7z70jiJoCc1Rz_58jrXRirpyJMlr0FrIii2I,3435
 camel/configs/base_config.py,sha256=RrlOwwTUXeTjsDChZXUZIBK1uCojyavEbX21bGVLuog,3286
 camel/configs/cohere_config.py,sha256=joF4GHqoTIRuEDlyTmxW5Ud23psE0xP1VCcEvKychko,3997
-camel/configs/deepseek_config.py,sha256=ZH6VELkvZ0rjA64PTwcvINDo4PWtkiPMTY7VwNXxc0I,6685
-camel/configs/gemini_config.py,sha256=m4p9zijSBmIba41NbSa997NRz2HumiRcb5nTICAMPXM,5686
+camel/configs/deepseek_config.py,sha256=jME5rfx8MYwyykZkAXZG-QhpMe1IBOvF9W38s5t9Fq0,6685
+camel/configs/gemini_config.py,sha256=tA-Gpo1aNlAdigs1TjNjG517hrTqfo4Pj3WPZfqp7Oc,5701
 camel/configs/groq_config.py,sha256=Xe82_EbEYfacNXQApIHZiXw-NscufZxnLau72YEy_iA,5744
-camel/configs/internlm_config.py,sha256=CyByp-I93Vry_WKypq8E-qxfbgQ-VuONJFrFyjtAZx0,2979
+camel/configs/internlm_config.py,sha256=I1Hcyj5r3Sq7WUu0ypEUroqtOGbI2dXawUS6GVGhW6U,2979
 camel/configs/litellm_config.py,sha256=oa6b67M0UotlvN7NuXrSUXLrskdpm3RMcew0rBfSsBc,4686
 camel/configs/mistral_config.py,sha256=ul7AAeG3172PtodEEruAZky0OURwgp6YeNq8ma-4os4,3463
 camel/configs/nvidia_config.py,sha256=1Oc3tulHOqAfx1mkrEywrxKIV1SBNzPm0CNrWgj9HXo,3226
@@ -46,7 +46,7 @@ camel/configs/openai_config.py,sha256=CRqM00fA4mXsaADuhwbwwceW3t4o0ae9_7CiKjJg1g
 camel/configs/qwen_config.py,sha256=wLzkv0G3qzcqRN31oHv-6OXKcB1-ILlROj4xgdFSmPM,4648
 camel/configs/reka_config.py,sha256=QhTa4hUKz_TF3txTJRNlLSJ391uphEqZOG0zev6bI7w,3498
 camel/configs/samba_config.py,sha256=2__Xj0HIsFWN38rsbZl9a-lXwOO5XHXoo_j7VwiUDpA,8825
-camel/configs/sglang_config.py,sha256=NPgwUZZ3igCl5LTMDYZzkzOvmk5vbgnbv_UUUlMRI74,3509
+camel/configs/sglang_config.py,sha256=EFr5TE8kXiZbx2lKm4r3Y5noDHGTIOPL6IlQHDum6kQ,3796
 camel/configs/togetherai_config.py,sha256=bzFlDPR78NwvGCIPAhplITiy8WsGrdK4IDBbfQ7xGSw,5655
 camel/configs/vllm_config.py,sha256=9F81FtJGbO8T3aGFggDOVUL5qequPmg882ZSXjIeFW0,6110
 camel/configs/yi_config.py,sha256=ZvrOroEdYgdyBWcYxn3CHyOA64P5I16XBVbgfusxaPQ,2745
@@ -62,14 +62,19 @@ camel/datagen/self_instruct/filter/__init__.py,sha256=UiGBfDRYO-3Z3dhaxAFVh4F8PF
 camel/datagen/self_instruct/filter/filter_function.py,sha256=-voPwP83c_bkZrSAhwludBCtfsKDFG_jlDHcNUOLV7o,6691
 camel/datagen/self_instruct/filter/filter_registry.py,sha256=5M_aNIopBeBj7U4fUsrAQpXQ2cZT6o6GaIIo0briFw0,2125
 camel/datagen/self_instruct/filter/instruction_filter.py,sha256=la_7P5bVdrk2qffnYFI2Ie3cjCEEHBxe4HB8PZ5jMq0,3426
-camel/datagen/self_instruct/self_instruct.py,sha256=3HRAptA2ZqCLdTs-_adNTReAMirDjSlQsCXzfvxk4GM,13995
+camel/datagen/self_instruct/self_instruct.py,sha256=W_0LSSnTBcqZD1dtdWIgXeTcgFEVqjLyTZojj6lYC-0,14076
 camel/datagen/self_instruct/templates.py,sha256=7YMOUcIig6vLjqSwkWCq8XeRCjWq0Mfyzptn7DOmeAo,19480
+camel/datagen/source2synth/__init__.py,sha256=Kd6BBgIPRBm_VPAbJb-V_QKYyRDcX3fTQTm2Bl0Vkpc,1056
+camel/datagen/source2synth/data_processor.py,sha256=2_E1sS4BZNiAUMuB4ItomhU2oIFI6PwQKqDVlSv5ILc,17783
+camel/datagen/source2synth/models.py,sha256=gTdiKoGeHrZBPPOts6mKU9ZeUKh21uDcrClsm8ysEEA,3304
+camel/datagen/source2synth/user_data_processor_config.py,sha256=WpIePsxzFbpv3wFl0Wpe6kl0fJmR2AajiBH2OOJvFC0,2409
 camel/datahubs/__init__.py,sha256=1a8fRuzgirO2pHtPnuisZ76iF_AN9GxMFq9gwFKWE5I,906
 camel/datahubs/base.py,sha256=4QKWiJaeL5ReQpyTAbOtzHs-2CzAYbVyoMngYwdpZGU,4357
 camel/datahubs/huggingface.py,sha256=OrrFUAQ9p92T4P2M_jq_ykR_Z6lfjd52CZszefm8Ls0,14883
 camel/datahubs/models.py,sha256=tGb9OP_aomIhnwc0VapJjTg9PmyV_QCp5to9sABXF0Y,978
-camel/embeddings/__init__.py,sha256=uOPul-p528_0TcCoBIe8-jhU4p1z1r5Lnsy0IFle3HQ,1200
+camel/embeddings/__init__.py,sha256=YKCFO_YVY-x4A4uWmRuoIEtltrilBmC17DkCcK4zSj8,1263
 camel/embeddings/base.py,sha256=mxqFkWh2AfbxuVKPOqVx16fCznmuSh9QXGjaEeZHvoY,2190
+camel/embeddings/jina_embedding.py,sha256=N8rISoLi3-tztj0-lCMLC4HAxrNu0wUUBFCEJvYkmCE,6233
 camel/embeddings/mistral_embedding.py,sha256=JaHjcHrc4U216QfGA4NxOSLrgYB9lM19VR2mIMAkuvk,3287
 camel/embeddings/openai_compatible_embedding.py,sha256=48T1fNUkgifoPiVHPJ7HJERekP1sENy3t07S1ENiwWk,3158
 camel/embeddings/openai_embedding.py,sha256=DZh5OuXzBo1fMXifgyStUMm_BFaK1vQYrKdFtXqLKdg,3655
@@ -81,7 +86,7 @@ camel/interpreters/__init__.py,sha256=NOQUsg7gR84zO8nBXu4JGUatsxSDJqZS6otltjXfop
 camel/interpreters/base.py,sha256=F026f2ZnvHwikSMbk6APYNvB9qP4Ye5quSkTbFKV3O0,1898
 camel/interpreters/docker_interpreter.py,sha256=Uo5r2jcJGjC6rn5Yzx9qLzlXTsA5RH7AnFe7I0rxo10,8700
 camel/interpreters/e2b_interpreter.py,sha256=UC0en39x705cnnMCX4GxN7Tx0gCpu5yuWOFSBl_TagE,4815
-camel/interpreters/internal_python_interpreter.py,sha256=YYAXAmDWayrPQgeae7UVdD_k35DHxqUyFuHfDsApQjc,21860
+camel/interpreters/internal_python_interpreter.py,sha256=9psFm8mkN5-5WdTW__VBjDoh_u-PCifJMQYeo0DEoZo,22464
 camel/interpreters/interpreter_error.py,sha256=uEhcmHmmcajt5C9PLeHs21h1fE6cmyt23tCAGie1kTA,880
 camel/interpreters/ipython_interpreter.py,sha256=-erOR6imuh5pUtpbUYky3zoLDr30Y5E7lm59BwwxzNs,5976
 camel/interpreters/subprocess_interpreter.py,sha256=HZBpYBI_W1WPZ6W0uEXYnlAzGC-7fJChGMXl1yoMTss,6909
@@ -107,21 +112,21 @@ camel/messages/__init__.py,sha256=Px-gTFp2Kcgbeb2sZQ_f4tqjoLHE-QEOiMHIMfPrvTw,19
 camel/messages/base.py,sha256=1jCeQn0Rs7vujjG2iqlBG449dGqM1INZVlVZGG5IY2E,19614
 camel/messages/conversion/__init__.py,sha256=8B4C-0wj-dm925YRKNyx31WYK25PWpME7Q9jPtx2jkY,1047
 camel/messages/conversion/alpaca.py,sha256=jBU2bMhzNjzptGuoasThYvFov_cYPCYt3pEfs0T7z7U,4163
-camel/messages/conversion/conversation_models.py,sha256=uWVga8CyHG4Q61ABMz8KtZngRvhXJeJ5cY2QZTSD9Wo,5401
+camel/messages/conversion/conversation_models.py,sha256=f2ybtYdCbILq9IYgaHkQ57yYxDdCBSspKrfaArZvNw8,5300
 camel/messages/conversion/sharegpt/__init__.py,sha256=oWUuHV5w85kxqhz_hoElLmCfzLm-ccku-fM9SnUJ5zI,794
 camel/messages/conversion/sharegpt/function_call_formatter.py,sha256=cn7e7CfmxEVFlfOqhjhNuA8nuWvWD6hXYn-3okXNxxQ,1832
 camel/messages/conversion/sharegpt/hermes/__init__.py,sha256=mxuMSm-neaTgInIjYXuIVdC310E6jKJzM3IdtaJ4qY4,812
 camel/messages/conversion/sharegpt/hermes/hermes_function_formatter.py,sha256=-9TT8iOQ-ieKSKR_PmJSA5Bi0uBx-qR7WQ6vxuFkorM,4639
-camel/messages/func_message.py,sha256=y-MYXqbWL3QU2l9xLWZ8hAL7Yk7S_IgnHYVdnw5CfGc,5946
+camel/messages/func_message.py,sha256=EjsUor40oUUKrHwolRpCH0sJECcqnp2mm4072tNWTPg,5939
 camel/models/__init__.py,sha256=CWSHG0qbIT64pQ6m3_NdXr9iydGF_8-5x-MJfcGtT0M,2456
 camel/models/anthropic_model.py,sha256=BOj4vEtYVWbgy3DmBBlFh6LPXHbi1-LCPWzIxFuw9u4,5829
 camel/models/azure_openai_model.py,sha256=ptL4YK8KkAbOA6XDxIhcEqxPOVGrYmzXqBzdsZAyHss,6083
 camel/models/base_model.py,sha256=rxRZc31cKone4OGuvXi14FI_O9TC1aBvIy8WFSlVeSI,5727
 camel/models/cohere_model.py,sha256=4Sm-YvQnSquz8L4EN9qGn0qz6WTz4cm_BQtP7_NZOHQ,10731
-camel/models/deepseek_model.py,sha256=3RfF3-QXzN3mvtYwno6nyS20_-gqxdua_5k4ywDzN0A,5086
+camel/models/deepseek_model.py,sha256=WBZE63hB61XE4b_DLunlTX54sw74k-ImAVTyE9CDrSo,8140
 camel/models/fish_audio_model.py,sha256=mid-wdV_hkxHay-vgnF3zshwdXLyO4eM31139_0BXzo,5586
 camel/models/gemini_model.py,sha256=mS3_91vlLifsIolDR3TYRFzpV67iAnFiIRAZ8F5O7Qc,5462
-camel/models/groq_model.py,sha256=dSD23iHOeQ7ppDp34h2waSbRWRL0OjpsisA9_oUEprc,5014
+camel/models/groq_model.py,sha256=gDLv1_gOIioNmTh7I_efM5FMEsELqeQGAtY7ipd85TU,4922
 camel/models/internlm_model.py,sha256=khWd570OU3OZJpjGhmq81tJ_OsZM1m3zcyNDmdTmgqo,5114
 camel/models/litellm_model.py,sha256=-9DcJlVBL25vsZOdA0UkEWt5G5PP8QaXXhcE2PRiwRw,5296
 camel/models/mistral_model.py,sha256=7OUTdTKzYPGYdi0n_hBAawlplYVR6XyvfzANHki660c,10182
@@ -132,7 +137,7 @@ camel/models/nvidia_model.py,sha256=7THp2MECXYBUpJWTZkkgGQqgsLfjXPMVb1aGWFA1vdg,
 camel/models/ollama_model.py,sha256=uiIgXmz6EqRsi3mBh8RAWopOom6rM77H4fP_Hp8cj3U,6057
 camel/models/openai_audio_models.py,sha256=61tGMxwOwXwh9RThXcQmkTCiGPEhau85_BM_nxgIKmY,10036
 camel/models/openai_compatible_model.py,sha256=NnDSj7e-SDYDGq0VTQVHCE0kRTJ2JDb62Z8z0ZcpygA,4059
-camel/models/openai_model.py,sha256=C7hHK-srzidaEl8edrHmhdYJBzMzvTBikHNQtOViheU,6848
+camel/models/openai_model.py,sha256=3pqVoGs7k_H896qPbPs0SXUPKXelGSj9eE3HaG_xs4s,6483
 camel/models/qwen_model.py,sha256=HqojMgFaUcOa-P6f4OWEyaRyNmDJvtfQIHUp_CtPjm4,5044
 camel/models/reka_model.py,sha256=NalBlxuW-wu7rfVcHS5YAFDOSASzutnwukjN2e6pjUo,8329
 camel/models/reward/__init__.py,sha256=MqPN6wXh7Y1SoeNoFlYaMG6xHzLG0CYsv_3kB2atIQk,984
@@ -221,9 +226,6 @@ camel/storages/vectordb_storages/__init__.py,sha256=NCXSLGFE5BuGWDYrsXuiJIsOJObw
 camel/storages/vectordb_storages/base.py,sha256=XhgTthEg4jEIsycwKy48QXj4POnCx9j9UndlTM_cG9w,6675
 camel/storages/vectordb_storages/milvus.py,sha256=XGKSQQflvqvTCo92rrgmbwYtsJKY9JxphdEQqGXf_kA,13483
 camel/storages/vectordb_storages/qdrant.py,sha256=pDkhX3iu1rFCbiMz6F47EhgENCCVDx3ejh-zt988NtU,18011
-camel/synthetic_datagen/source2synth/data_processor.py,sha256=e7m5LgLuKmLoe_WyAXahE1YVqaN-yNwOTXQy9dB_9Eg,12103
-camel/synthetic_datagen/source2synth/models.py,sha256=l5iW9ziZOo73GHcujnJHqqmo-Z2HZef0umGeqz_bM4Y,2442
-camel/synthetic_datagen/source2synth/user_data_processor_config.py,sha256=LVT2zbS5TT8GULkshgEDfGUCBLG4FqNztv266EP5Rjg,2255
 camel/tasks/__init__.py,sha256=MuHwkw5GRQc8NOCzj8tjtBrr2Xg9KrcKp-ed_-2ZGIM,906
 camel/tasks/task.py,sha256=FpYYbxWrAvqFJ4KvbjIn-EnTGpe9u_emSWUpdIuCAZo,13178
 camel/tasks/task_prompt.py,sha256=3KZmKYKUPcTKe8EAZOZBN3G05JHRVt7oHY9ORzLVu1g,2150
@@ -277,7 +279,7 @@ camel/toolkits/open_api_toolkit.py,sha256=Venfq8JwTMQfzRzzB7AYmYUMEX35hW0BjIv_oz
 camel/toolkits/openbb_toolkit.py,sha256=tq4ER2utf9y4nsibU7g7jnRefA3UHw4DdXQlSrFhvJk,28802
 camel/toolkits/reddit_toolkit.py,sha256=tb0qwgtawgWe-PPotKVsKqMqkSiACP6k9MzKHuMbkXU,8886
 camel/toolkits/retrieval_toolkit.py,sha256=gMHk-Y-KDROGd-yX9ykfpwAf6ViO678j9Q9Ju3sfBGo,3695
-camel/toolkits/search_toolkit.py,sha256=QTq5_OgZtuWAc41D5Vn7QtKG22-suItD2N5L166g_KY,29398
+camel/toolkits/search_toolkit.py,sha256=_QgoLeLeQIwev2o7MDXrPyihr5b4XWasrcN8qBK_izg,29327
 camel/toolkits/slack_toolkit.py,sha256=n8cn3kZIc27B-2KMTRK6Nsdan37SwMqBiBi1PMtuUvQ,10744
 camel/toolkits/stripe_toolkit.py,sha256=cQJlzu7qXSiClazgr-D3xRAcI_PK_csTT-xcwaTrHYc,9623
 camel/toolkits/twitter_toolkit.py,sha256=a2OLSJSW2wY7pOwOApb1qchZPXzH22Rbgm9Yd7-7vrA,15826
@@ -285,7 +287,7 @@ camel/toolkits/video_toolkit.py,sha256=n1P7F_cjdnC2jfUQQiJnhueRYA83GIjUF7HWIrES5
 camel/toolkits/weather_toolkit.py,sha256=qHAMD56zqd5GWnEWiaA_0aBDwvgacdx0pAHScinY4GY,6965
 camel/toolkits/whatsapp_toolkit.py,sha256=H_83AFCIoBMvZUcfUvfRTIAjfR2DR79xP2J-rfQKtNo,6326
 camel/types/__init__.py,sha256=_NYwmy412tubPYJon26fS9itGnylP48NLFKgwyMiJNs,2251
-camel/types/enums.py,sha256=DnregHhO2yv61k39qT0CRA7ZP8s4-GAGMdMaXzIC6dM,24695
+camel/types/enums.py,sha256=0N21PetU0A-EwO5eiAF8Q4QXibsFYcjGElnWBXNNn8E,28856
 camel/types/openai_types.py,sha256=7Vlci1uRbpSS81B958Z8ADnkzVyqxV7O5H8hv0i-tdo,2328
 camel/types/unified_model_type.py,sha256=FT-abBHFTRN6qMRSVa_Lqljoc7JmyyMs8PeJfQUOSag,4119
 camel/utils/__init__.py,sha256=0K8HKzUlOStBjo9Mt3tgFbv6YINMIiF0b7MSWRyZ-NA,2471
@@ -293,8 +295,8 @@ camel/utils/async_func.py,sha256=4esRhhGrvfm-iJRloUbU-sYWyHp_mt0bBBXpwyCv6vc,155
 camel/utils/commons.py,sha256=Ph5O_vihyH85BfQ-A4Z2kc0uO45QXtwQr0qbfpbK6Rg,21934
 camel/utils/constants.py,sha256=MQD3bgLIq_NATp0D1iFkrwfkCwVX-PAOSXheTkkEdkY,1410
 camel/utils/response_format.py,sha256=9KrbwtOM9cA3LSjTgLiK7oKy-53_uMh1cvpyNwwJpng,2419
-camel/utils/token_counting.py,sha256=wLVgCFiLOWAzW2NtrZ-1t1VIsf2MT8hVAbc3pqAx4V4,15319
-camel_ai-0.2.17.dist-info/LICENSE,sha256=id0nB2my5kG0xXeimIu5zZrbHLS6EQvxvkKkzIHaT2k,11343
-camel_ai-0.2.17.dist-info/METADATA,sha256=8KO-En_8opfeGKUAok5Q0VRtPyqKBgcqrZuAswFfA4M,35136
-camel_ai-0.2.17.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
-camel_ai-0.2.17.dist-info/RECORD,,
+camel/utils/token_counting.py,sha256=jJSK_kJKovuDESP2WuYVGHWS7Q-xTMIQsLK0AMS_Nw8,15345
+camel_ai-0.2.19.dist-info/LICENSE,sha256=id0nB2my5kG0xXeimIu5zZrbHLS6EQvxvkKkzIHaT2k,11343
+camel_ai-0.2.19.dist-info/METADATA,sha256=xAY3OB9Ac86gQYQoqlAsOTXKpRyNzl0myIOMpTv3iSE,35632
+camel_ai-0.2.19.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+camel_ai-0.2.19.dist-info/RECORD,,