camel-ai 0.2.55__py3-none-any.whl → 0.2.57__py3-none-any.whl

Potentially problematic release: this version of camel-ai might be problematic.

camel/__init__.py CHANGED
@@ -14,7 +14,7 @@
 
 from camel.logger import disable_logging, enable_logging, set_log_level
 
-__version__ = '0.2.55'
+__version__ = '0.2.57'
 
 __all__ = [
     '__version__',
camel/agents/chat_agent.py CHANGED
@@ -1311,7 +1311,9 @@ class ChatAgent(BaseAgent):
         response_id: str = ""
         # All choices in one response share one role
         for chunk in response:
-            response_id = chunk.id
+            # Some model platforms like siliconflow may return None for the
+            # chunk.id
+            response_id = chunk.id if chunk.id else str(uuid.uuid4())
             self._handle_chunk(
                 chunk, content_dict, finish_reasons_dict, output_messages
             )
@@ -1351,7 +1353,9 @@ class ChatAgent(BaseAgent):
         response_id: str = ""
         # All choices in one response share one role
         async for chunk in response:
-            response_id = chunk.id
+            # Some model platforms like siliconflow may return None for the
+            # chunk.id
+            response_id = chunk.id if chunk.id else str(uuid.uuid4())
             self._handle_chunk(
                 chunk, content_dict, finish_reasons_dict, output_messages
             )
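The streaming change above only affects how the response id is chosen when a platform omits it. A minimal standalone sketch of the fallback pattern (the Chunk class here is a stand-in for illustration, not the actual OpenAI stream type):

    import uuid
    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class Chunk:
        id: Optional[str]  # some platforms (e.g. siliconflow) may send None here

    def resolve_response_id(chunk: Chunk) -> str:
        # Fall back to a locally generated UUID when the platform omits the id.
        return chunk.id if chunk.id else str(uuid.uuid4())

    print(resolve_response_id(Chunk(id="chatcmpl-123")))  # keeps the platform id
    print(resolve_response_id(Chunk(id=None)))            # generates a uuid4 string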
camel/models/gemini_model.py CHANGED
@@ -125,6 +125,11 @@ class GeminiModel(OpenAICompatibleModel):
         )
         messages = self._process_messages(messages)
         if response_format:
+            if tools:
+                raise ValueError(
+                    "Gemini does not support function calling with "
+                    "response format."
+                )
             return self._request_parse(messages, response_format)
         else:
             return self._request_chat_completion(messages, tools)
@@ -155,6 +160,11 @@ class GeminiModel(OpenAICompatibleModel):
         )
         messages = self._process_messages(messages)
         if response_format:
+            if tools:
+                raise ValueError(
+                    "Gemini does not support function calling with "
+                    "response format."
+                )
             return await self._arequest_parse(messages, response_format)
         else:
             return await self._arequest_chat_completion(messages, tools)
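The effect of the new guard is that structured output and tool calling are mutually exclusive for GeminiModel requests. A small sketch of the same check in isolation (the function and Answer model are illustrative, not part of the diff):

    from typing import Any, Dict, List, Optional, Type
    from pydantic import BaseModel

    def check_gemini_request(
        response_format: Optional[Type[BaseModel]],
        tools: Optional[List[Dict[str, Any]]],
    ) -> None:
        # Mirrors the guard added above: a structured-output request that also
        # carries tools is rejected before any API call is made.
        if response_format and tools:
            raise ValueError(
                "Gemini does not support function calling with response format."
            )

    class Answer(BaseModel):
        text: str

    check_gemini_request(response_format=Answer, tools=None)     # passes
    # check_gemini_request(response_format=Answer, tools=[{}])   # would raise ValueError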
camel/models/mistral_model.py CHANGED
@@ -25,6 +25,7 @@ if TYPE_CHECKING:
     from openai import AsyncStream
 
 from camel.configs import MISTRAL_API_PARAMS, MistralConfig
+from camel.logger import get_logger
 from camel.messages import OpenAIMessage
 from camel.models import BaseModelBackend
 from camel.models._utils import try_modify_message_with_format
@@ -36,6 +37,8 @@ from camel.utils import (
     dependencies_required,
 )
 
+logger = get_logger(__name__)
+
 try:
     if os.getenv("AGENTOPS_API_KEY") is not None:
         from agentops import LLMEvent, record
@@ -235,7 +238,38 @@ class MistralModel(BaseModelBackend):
         response_format: Optional[Type[BaseModel]] = None,
         tools: Optional[List[Dict[str, Any]]] = None,
     ) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
-        raise NotImplementedError("Mistral does not support async inference.")
+        logger.warning(
+            "Mistral does not support async inference, using sync "
+            "inference instead."
+        )
+        request_config = self._prepare_request(
+            messages, response_format, tools
+        )
+        mistral_messages = self._to_mistral_chatmessage(messages)
+
+        response = self._client.chat.complete(
+            messages=mistral_messages,
+            model=self.model_type,
+            **request_config,
+        )
+
+        openai_response = self._to_openai_response(response)  # type: ignore[arg-type]
+
+        # Add AgentOps LLM Event tracking
+        if LLMEvent:
+            llm_event = LLMEvent(
+                thread_id=openai_response.id,
+                prompt=" ".join(
+                    [message.get("content") for message in messages]  # type: ignore[misc]
+                ),
+                prompt_tokens=openai_response.usage.prompt_tokens,  # type: ignore[union-attr]
+                completion=openai_response.choices[0].message.content,
+                completion_tokens=openai_response.usage.completion_tokens,  # type: ignore[union-attr]
+                model=self.model_type,
+            )
+            record(llm_event)
+
+        return openai_response
 
     def _run(
         self,
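With this change, awaiting the Mistral backend no longer raises NotImplementedError; it logs a warning and reuses the synchronous Mistral client. A generic sketch of that delegation pattern (class and method names here are illustrative, not the actual MistralModel surface):

    import asyncio
    import logging

    logger = logging.getLogger(__name__)

    class SyncOnlyBackend:
        def run_sync(self, prompt: str) -> str:
            # Stand-in for the blocking SDK call (client.chat.complete in the diff).
            return f"echo: {prompt}"

        async def arun(self, prompt: str) -> str:
            # Async entry point that simply falls back to the sync path, as the diff does.
            logger.warning(
                "Backend does not support async inference, using sync inference instead."
            )
            return self.run_sync(prompt)

    print(asyncio.run(SyncOnlyBackend().arun("hello")))

Note that, as in the diff, the synchronous call still blocks the event loop while it runs.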
camel/toolkits/__init__.py CHANGED
@@ -71,6 +71,7 @@ from .jina_reranker_toolkit import JinaRerankerToolkit
 from .pulse_mcp_search_toolkit import PulseMCPSearchToolkit
 from .klavis_toolkit import KlavisToolkit
 from .aci_toolkit import ACIToolkit
+from .playwright_mcp_toolkit import PlaywrightMCPToolkit
 
 
 __all__ = [
@@ -130,4 +131,5 @@ __all__ = [
     'PulseMCPSearchToolkit',
     'KlavisToolkit',
     'ACIToolkit',
+    'PlaywrightMCPToolkit',
 ]
camel/toolkits/playwright_mcp_toolkit.py ADDED
@@ -0,0 +1,69 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+
+from typing import List, Optional
+
+from camel.toolkits import BaseToolkit, FunctionTool
+
+from .mcp_toolkit import MCPToolkit
+
+
+class PlaywrightMCPToolkit(BaseToolkit):
+    r"""PlaywrightMCPToolkit provides an interface for interacting with web
+    browsers using the Playwright automation library through the Model Context
+    Protocol (MCP).
+
+    Attributes:
+        timeout (Optional[float]): Connection timeout in seconds.
+            (default: :obj:`None`)
+
+    Note:
+        Currently only supports asynchronous operation mode.
+    """
+
+    def __init__(self, timeout: Optional[float] = None) -> None:
+        r"""Initializes the PlaywrightMCPToolkit with the specified timeout.
+
+        Args:
+            timeout (Optional[float]): Connection timeout in seconds.
+                (default: :obj:`None`)
+        """
+        super().__init__(timeout=timeout)
+
+        self._mcp_toolkit = MCPToolkit(
+            config_dict={
+                "mcpServers": {
+                    "playwright": {
+                        "command": "npx",
+                        "args": ["@playwright/mcp@latest"],
+                    }
+                }
+            }
+        )
+
+    async def connect(self):
+        r"""Explicitly connect to the Playwright MCP server."""
+        await self._mcp_toolkit.connect()
+
+    async def disconnect(self):
+        r"""Explicitly disconnect from the Playwright MCP server."""
+        await self._mcp_toolkit.disconnect()
+
+    def get_tools(self) -> List[FunctionTool]:
+        r"""Returns a list of tools provided by the PlaywrightMCPToolkit.
+
+        Returns:
+            List[FunctionTool]: List of available tools.
+        """
+        return self._mcp_toolkit.get_tools()
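A usage sketch for the new toolkit, assuming Node.js/npx is available so `npx @playwright/mcp@latest` can be launched, and assuming FunctionTool's get_function_name accessor; the wiring below is illustrative rather than taken from this diff:

    import asyncio

    from camel.toolkits import PlaywrightMCPToolkit

    async def main() -> None:
        toolkit = PlaywrightMCPToolkit()
        await toolkit.connect()  # starts the Playwright MCP server via npx
        try:
            for tool in toolkit.get_tools():
                # Print the browser-automation tools exposed over MCP.
                print(tool.get_function_name())
        finally:
            await toolkit.disconnect()

    asyncio.run(main())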
camel/types/enums.py CHANGED
@@ -15,8 +15,11 @@ import os
 from enum import Enum, EnumMeta
 from typing import cast
 
+from camel.logger import get_logger
 from camel.types.unified_model_type import UnifiedModelType
 
+logger = get_logger(__name__)
+
 
 class RoleType(Enum):
     ASSISTANT = "assistant"
@@ -196,6 +199,7 @@ class ModelType(UnifiedModelType, Enum):
     MISTRAL_MIXTRAL_8x22B = "open-mixtral-8x22b"
     MISTRAL_NEMO = "open-mistral-nemo"
     MISTRAL_PIXTRAL_12B = "pixtral-12b-2409"
+    MISTRAL_MEDIUM_3 = "mistral-medium-latest"
 
     # Reka models
     REKA_CORE = "reka-core"
@@ -596,6 +600,7 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.MISTRAL_PIXTRAL_12B,
             ModelType.MISTRAL_8B,
             ModelType.MISTRAL_3B,
+            ModelType.MISTRAL_MEDIUM_3,
         }
 
     @property
@@ -1104,6 +1109,7 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.NETMIND_DEEPSEEK_R1,
             ModelType.NETMIND_DEEPSEEK_V3,
             ModelType.NOVITA_DEEPSEEK_V3_0324,
+            ModelType.MISTRAL_MEDIUM_3,
         }:
             return 128_000
         elif self in {
@@ -1197,7 +1203,11 @@ class ModelType(UnifiedModelType, Enum):
         }:
             return 10_000_000
         else:
-            raise ValueError("Unknown model type")
+            logger.warning(
+                f"Unknown model type {self}, set maximum token limit "
+                f"to 999_999_999"
+            )
+            return 999_999_999
 
 
 class EmbeddingModelType(Enum):
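The practical effect of the enums change is that an unregistered model no longer crashes token-limit lookup; it warns and returns a large sentinel instead. A standalone sketch mirroring that branch (it does not import camel; the dictionary is illustrative):

    import logging

    logger = logging.getLogger(__name__)

    # MISTRAL_MEDIUM_3 ("mistral-medium-latest") now maps to a 128k context window.
    KNOWN_LIMITS = {"mistral-medium-latest": 128_000}

    def token_limit(model_name: str) -> int:
        # Old behavior: raise ValueError("Unknown model type") for anything unknown.
        # New behavior: warn and fall back to a very large sentinel limit.
        if model_name in KNOWN_LIMITS:
            return KNOWN_LIMITS[model_name]
        logger.warning(
            "Unknown model type %s, set maximum token limit to 999_999_999", model_name
        )
        return 999_999_999

    print(token_limit("mistral-medium-latest"))  # 128000
    print(token_limit("some-new-model"))         # 999999999, with a warning logged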
camel_ai-0.2.57.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: camel-ai
-Version: 0.2.55
+Version: 0.2.57
 Summary: Communicative Agents for AI Society Study
 Project-URL: Homepage, https://www.camel-ai.org/
 Project-URL: Repository, https://github.com/camel-ai/camel
camel_ai-0.2.57.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-camel/__init__.py,sha256=6jo6JFvIWniUwCXTnl_djUJ46s8RoBSOwnxlRrnmiRM,912
+camel/__init__.py,sha256=w5hgs58ruWalSpq12-blpBnUVKcz68enl_rI6eePmx0,912
 camel/generators.py,sha256=JRqj9_m1PF4qT6UtybzTQ-KBT9MJQt18OAAYvQ_fr2o,13844
 camel/human.py,sha256=9X09UmxI2JqQnhrFfnZ3B9EzFmVfdSWQcjLWTIXKXe0,4962
 camel/logger.py,sha256=rZVeOVYuQ9RYJ5Tqyv0usqy0g4zaVEq4qSfZ9nd2640,5755
@@ -7,7 +7,7 @@ camel/agents/__init__.py,sha256=64weKqdvmpZcGWyVkO-OKASAmVUdrQjv60JApgPk_SA,1644
 camel/agents/_types.py,sha256=ryPRmEXnpNtbFT23GoAcwK-zxWWsIOqYu64mxMx_PhI,1430
 camel/agents/_utils.py,sha256=AR7Qqgbkmn4X2edYUQf1rdksGUyV5hm3iK1z-Dn0Mcg,6266
 camel/agents/base.py,sha256=c4bJYL3G3Z41SaFdMPMn8ZjLdFiFaVOFO6EQIfuCVR8,1124
-camel/agents/chat_agent.py,sha256=s19qJ0eK_bVX6P-yIoazO4cMUeAumwVY1XFNVKaDfVo,59281
+camel/agents/chat_agent.py,sha256=bumSJSWfyc_8jGUX4Gb66Tma8pAfXGf4UqyGkyWrOog,59549
 camel/agents/critic_agent.py,sha256=qFVlHlQo0CVgmPWfWYLT8_oP_KyzCLFsQw_nN_vu5Bs,7487
 camel/agents/deductive_reasoner_agent.py,sha256=6BZGaq1hR6hKJuQtOfoYQnk_AkZpw_Mr7mUy2MspQgs,13540
 camel/agents/embodied_agent.py,sha256=XBxBu5ZMmSJ4B2U3Z7SMwvLlgp6yNpaBe8HNQmY9CZA,7536
@@ -171,12 +171,12 @@ camel/models/base_model.py,sha256=eDeUlgH8iS0Stk6zommzqce4dfD4Qj51tvgXUs5ys4s,14
 camel/models/cohere_model.py,sha256=OgRHxlPrei-NT5UVDFf6lVR88k6eKnmcZMyFj4XmONE,14880
 camel/models/deepseek_model.py,sha256=TLs-674MBM-vDxBkMwWn63q51HlQucE3HDxmFMe5c9o,9222
 camel/models/fish_audio_model.py,sha256=RCwORRIdCbjZXWWjjctpksPI2DnS0b68JjxunHBQ1xk,5981
-camel/models/gemini_model.py,sha256=WlLpFgkRbOiEUXtA7wsV4FQbIJ4UYjR6TgZQpbCEpto,10459
+camel/models/gemini_model.py,sha256=f7rMcVpZIP0wVJGdz1pT0gGXtXuZjEc-_Z7ca9UTn8g,10823
 camel/models/groq_model.py,sha256=596VqRJ_yxv9Jz3sG7UVXVkIjZI1nX7zQAD709m4uig,3774
 camel/models/internlm_model.py,sha256=l7WjJ7JISCCqkezhEXzmjj_Mvhqhxxhsg4NuenP7w9w,4374
 camel/models/litellm_model.py,sha256=rlSt3EnBAAYyoIxq0_XTuRmRnc4RWvD2Z14yIrI_7uw,5942
 camel/models/lmstudio_model.py,sha256=_Lnv0e2ichks_MrNJGNIawEtGtP7T_xX8v0bFNNeWes,3641
-camel/models/mistral_model.py,sha256=lsvfxhwp9aDGRpGKLt6Mwtnlw27jFp8AbpGw0UTHJds,12186
+camel/models/mistral_model.py,sha256=3tT59xJO0rwZK0Gs0RXtV6TC9g6uEO9gD7D_-NzhHDc,13399
 camel/models/model_factory.py,sha256=e-dRSZgNGXCgjgdxDl125qtdts7o9M0RQ6mY8qzoBO0,11470
 camel/models/model_manager.py,sha256=gfpL-WUxuTXgNeCkIVg8Y0zRvxMqRLX8JGt0XEAPQ8Y,9214
 camel/models/modelscope_model.py,sha256=aI7i50DSIE6MO2U_WvULan6Sk4b5d7iZoEHQaARo4FA,10487
@@ -295,7 +295,7 @@ camel/terminators/__init__.py,sha256=t8uqrkUnXEOYMXQDgaBkMFJ0EXFKI0kmx4cUimli3Ls
 camel/terminators/base.py,sha256=xmJzERX7GdSXcxZjAHHODa0rOxRChMSRboDCNHWSscs,1511
 camel/terminators/response_terminator.py,sha256=n3G5KP6Oj7-7WlRN0yFcrtLpqAJKaKS0bmhrWlFfCgQ,4982
 camel/terminators/token_limit_terminator.py,sha256=YWv6ZR8R9yI2Qnf_3xES5bEE_O5bb2CxQ0EUXfMh34c,2118
-camel/toolkits/__init__.py,sha256=zhX4kE2hClB5c-RyVyF9jeTdDKOzYp6L6nj91QlglgA,4572
+camel/toolkits/__init__.py,sha256=JbvxFf-wUFClNSriXB56LkhaRipw4n4PEc2M9f5-47o,4657
 camel/toolkits/aci_toolkit.py,sha256=jhXMQggG22hd3dXdT3iJm7qWTH3KJC-TUVk1txoNWrM,16079
 camel/toolkits/arxiv_toolkit.py,sha256=Bs2-K1yfmqhEhHoQ0j00KoI8LpOd8M3ApXcvI_-ApVw,6303
 camel/toolkits/ask_news_toolkit.py,sha256=WfWaqwEo1Apbil3-Rb5y65Ws43NU4rAFWZu5VHe4los,23448
@@ -329,6 +329,7 @@ camel/toolkits/open_api_toolkit.py,sha256=Venfq8JwTMQfzRzzB7AYmYUMEX35hW0BjIv_oz
 camel/toolkits/openai_agent_toolkit.py,sha256=hT2ancdQigngAiY1LNnGJzZeiBDHUxrRGv6BdZTJizc,4696
 camel/toolkits/openbb_toolkit.py,sha256=8yBZL9E2iSgskosBQhD3pTP56oV6gerWpFjIJc_2UMo,28935
 camel/toolkits/page_script.js,sha256=gypbuQ_gn_oa3rQDoCN_q-kJ0jND1eSvY-30PufPZmQ,12613
+camel/toolkits/playwright_mcp_toolkit.py,sha256=_TcCRA3ECaWO0pqUjoZ2whfWXgvank4A4isqNjgZqqc,2403
 camel/toolkits/pubmed_toolkit.py,sha256=VGl8KeyWi7pjb2kEhFBLmpBlP9ezv8JyWRHtEVTQ6nQ,12227
 camel/toolkits/pulse_mcp_search_toolkit.py,sha256=uLUpm19uC_4xLJow0gGVS9f-5T5EW2iRAXdJ4nqJG-A,4783
 camel/toolkits/pyautogui_toolkit.py,sha256=Q810fm8cFvElRory7B74aqS2YV6BOpdRE6jkewoM8xc,16093
@@ -374,7 +375,7 @@ camel/toolkits/open_api_specs/web_scraper/openapi.yaml,sha256=u_WalQ01e8W1D27VnZ
 camel/toolkits/open_api_specs/web_scraper/paths/__init__.py,sha256=OKCZrQCDwaWtXIN_2rA9FSqEvgpQRieRoHh7Ek6N16A,702
 camel/toolkits/open_api_specs/web_scraper/paths/scraper.py,sha256=aWy1_ppV4NVVEZfnbN3tu9XA9yAPAC9bRStJ5JuXMRU,1117
 camel/types/__init__.py,sha256=Xdkjh7TQDAQUQ7pFnt7i_rLjjOhsJJnhzusARcFc94Q,2311
-camel/types/enums.py,sha256=rJ08vZ-Rw19Jl5u0jDN5jE943NIFwQZlxaDZPY_qf8w,58190
+camel/types/enums.py,sha256=flMhlgYsmNn_5RdAMTR2_W-Zk4jzuLEtbRzyAEDt2lY,58511
 camel/types/openai_types.py,sha256=8ZFzLe-zGmKNPfuVZFzxlxAX98lGf18gtrPhOgMmzus,2104
 camel/types/unified_model_type.py,sha256=TpiUmJ3IuX8LNLtTUeUcVM7U82r4ClSq3ZQlNX3ODKs,5351
 camel/types/agents/__init__.py,sha256=cbvVkogPoZgcwZrgxLH6EtpGXk0kavF79nOic0Dc1vg,786
@@ -398,7 +399,7 @@ camel/verifiers/math_verifier.py,sha256=tA1D4S0sm8nsWISevxSN0hvSVtIUpqmJhzqfbuMo
 camel/verifiers/models.py,sha256=GdxYPr7UxNrR1577yW4kyroRcLGfd-H1GXgv8potDWU,2471
 camel/verifiers/physics_verifier.py,sha256=c1grrRddcrVN7szkxhv2QirwY9viIRSITWeWFF5HmLs,30187
 camel/verifiers/python_verifier.py,sha256=ogTz77wODfEcDN4tMVtiSkRQyoiZbHPY2fKybn59lHw,20558
-camel_ai-0.2.55.dist-info/METADATA,sha256=Yto1QgbDtuxobmL7TkrmuX9T5OnZPlZ_T1V-l2_eaN0,44254
-camel_ai-0.2.55.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-camel_ai-0.2.55.dist-info/licenses/LICENSE,sha256=id0nB2my5kG0xXeimIu5zZrbHLS6EQvxvkKkzIHaT2k,11343
-camel_ai-0.2.55.dist-info/RECORD,,
+camel_ai-0.2.57.dist-info/METADATA,sha256=3p4MukfjBAa2HETSYZYTEL-DBzUWxMxuLmfxlbZhRfo,44254
+camel_ai-0.2.57.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+camel_ai-0.2.57.dist-info/licenses/LICENSE,sha256=id0nB2my5kG0xXeimIu5zZrbHLS6EQvxvkKkzIHaT2k,11343
+camel_ai-0.2.57.dist-info/RECORD,,