camel-ai 0.2.76a5__py3-none-any.whl → 0.2.76a6__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
Potentially problematic release. This version of camel-ai might be problematic.
- camel/__init__.py +1 -1
- camel/models/azure_openai_model.py +0 -6
- camel/models/openai_compatible_model.py +0 -6
- camel/models/zhipuai_model.py +61 -2
- camel/toolkits/terminal_toolkit/terminal_toolkit.py +15 -0
- {camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/METADATA +1 -1
- {camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/RECORD +9 -9
- {camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/licenses/LICENSE +0 -0
camel/__init__.py
CHANGED
camel/models/azure_openai_model.py
CHANGED

@@ -247,9 +247,6 @@ class AzureOpenAIModel(BaseModelBackend):
         )
         is_streaming = self.model_config_dict.get("stream", False)
         if response_format:
-            result: Union[ChatCompletion, Stream[ChatCompletionChunk]] = (
-                self._request_parse(messages, response_format, tools)
-            )
             if is_streaming:
                 return self._request_stream_parse(
                     messages, response_format, tools

@@ -308,9 +305,6 @@ class AzureOpenAIModel(BaseModelBackend):
         )
         is_streaming = self.model_config_dict.get("stream", False)
         if response_format:
-            result: Union[
-                ChatCompletion, AsyncStream[ChatCompletionChunk]
-            ] = await self._arequest_parse(messages, response_format, tools)
             if is_streaming:
                 return await self._arequest_stream_parse(
                     messages, response_format, tools

camel/models/openai_compatible_model.py
CHANGED

@@ -190,9 +190,6 @@ class OpenAICompatibleModel(BaseModelBackend):
         is_streaming = self.model_config_dict.get("stream", False)

         if response_format:
-            result: Union[ChatCompletion, Stream[ChatCompletionChunk]] = (
-                self._request_parse(messages, response_format, tools)
-            )
             if is_streaming:
                 # Use streaming parse for structured output
                 return self._request_stream_parse(

@@ -256,9 +253,6 @@ class OpenAICompatibleModel(BaseModelBackend):
         is_streaming = self.model_config_dict.get("stream", False)

         if response_format:
-            result: Union[
-                ChatCompletion, AsyncStream[ChatCompletionChunk]
-            ] = await self._arequest_parse(messages, response_format, tools)
             if is_streaming:
                 # Use streaming parse for structured output
                 return await self._arequest_stream_parse(
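All four hunks above make the same fix: when a `response_format` was supplied, the old code eagerly called `_request_parse` / `_arequest_parse` and bound the result before checking `is_streaming`, so a streaming structured-output request also paid for a full non-streaming parse whose result was never used. Below is a minimal, self-contained sketch of the corrected control flow; the function names mirror the hunks, but the stub bodies and the `run` wrapper are illustrative stand-ins (not the package's real signatures), and it assumes the non-streaming branch still calls `_request_parse` further down, outside the lines shown.

```python
from typing import Any, Dict, List, Optional

# Illustrative stand-ins for the backend's parse helpers (assumed shapes).
def _request_parse(messages: List[Dict[str, Any]]) -> str:
    return "non-streaming structured response"

def _request_stream_parse(messages: List[Dict[str, Any]]) -> str:
    return "streaming structured response"

def run(
    messages: List[Dict[str, Any]],
    response_format: Optional[type],
    model_config_dict: Dict[str, Any],
) -> str:
    is_streaming = model_config_dict.get("stream", False)
    if response_format:
        # Previously, a non-streaming parse request was issued right here,
        # before the branch below; on the streaming path its result was
        # simply discarded.
        if is_streaming:
            return _request_stream_parse(messages)
        return _request_parse(messages)
    return "plain completion path"

print(run([{"role": "user", "content": "hi"}], dict, {"stream": True}))
```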
camel/models/zhipuai_model.py
CHANGED
@@ -13,16 +13,26 @@
 # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========

 import os
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, List, Optional, Type, Union
+
+from pydantic import BaseModel

 from camel.configs import ZhipuAIConfig
+from camel.logger import get_logger
+from camel.messages import OpenAIMessage
+from camel.models._utils import try_modify_message_with_format
 from camel.models.openai_compatible_model import OpenAICompatibleModel
-from camel.types import ModelType
+from camel.types import (
+    ChatCompletion,
+    ModelType,
+)
 from camel.utils import (
     BaseTokenCounter,
     api_keys_required,
 )

+logger = get_logger(__name__)
+

 class ZhipuAIModel(OpenAICompatibleModel):
     r"""ZhipuAI API in a unified OpenAICompatibleModel interface.

@@ -85,3 +95,52 @@ class ZhipuAIModel(OpenAICompatibleModel):
             max_retries=max_retries,
             **kwargs,
         )
+
+    def _request_parse(
+        self,
+        messages: List[OpenAIMessage],
+        response_format: Type[BaseModel],
+        tools: Optional[List[Dict[str, Any]]] = None,
+    ) -> ChatCompletion:
+        import copy
+
+        request_config = copy.deepcopy(self.model_config_dict)
+        request_config.pop("stream", None)
+        if tools is not None:
+            request_config["tools"] = tools
+
+        try_modify_message_with_format(messages[-1], response_format)
+        request_config["response_format"] = {"type": "json_object"}
+        try:
+            return self._client.beta.chat.completions.parse(
+                messages=messages,
+                model=self.model_type,
+                **request_config,
+            )
+        except Exception as e:
+            logger.error(f"Fallback attempt also failed: {e}")
+            raise
+
+    async def _arequest_parse(
+        self,
+        messages: List[OpenAIMessage],
+        response_format: Type[BaseModel],
+        tools: Optional[List[Dict[str, Any]]] = None,
+    ) -> ChatCompletion:
+        import copy
+
+        request_config = copy.deepcopy(self.model_config_dict)
+        request_config.pop("stream", None)
+        if tools is not None:
+            request_config["tools"] = tools
+        try_modify_message_with_format(messages[-1], response_format)
+        request_config["response_format"] = {"type": "json_object"}
+        try:
+            return await self._async_client.beta.chat.completions.parse(
+                messages=messages,
+                model=self.model_type,
+                **request_config,
+            )
+        except Exception as e:
+            logger.error(f"Fallback attempt also failed: {e}")
+            raise
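The new `_request_parse` / `_arequest_parse` overrides give `ZhipuAIModel` a structured-output fallback: drop `stream` from the request config, rewrite the last message so it asks for output matching the target Pydantic schema (via `try_modify_message_with_format`), and request plain `{"type": "json_object"}` output from the client's `beta.chat.completions.parse`. A self-contained sketch of the schema-injection idea follows; the `Itinerary` model and the `inject_schema` helper are made-up stand-ins for illustration, not the package's helper.

```python
import json

from pydantic import BaseModel


class Itinerary(BaseModel):  # example schema, not part of the package
    city: str
    days: int


def inject_schema(message: dict, response_format: type[BaseModel]) -> None:
    # Simplified stand-in for CAMEL's try_modify_message_with_format.
    schema = json.dumps(response_format.model_json_schema())
    message["content"] += (
        "\nRespond only with JSON matching this schema:\n" + schema
    )


messages = [{"role": "user", "content": "Plan a short trip to Beijing."}]
inject_schema(messages[-1], Itinerary)

# As in the diff: request plain JSON-object output instead of native parsing.
request_config = {"response_format": {"type": "json_object"}}
print(messages[-1]["content"])
print(request_config)
```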
camel/toolkits/terminal_toolkit/terminal_toolkit.py
CHANGED

@@ -15,6 +15,7 @@ import atexit
 import os
 import platform
 import select
+import shlex
 import subprocess
 import sys
 import threading

@@ -191,6 +192,20 @@ class TerminalToolkit(BaseToolkit):
                 f"Successfully attached to Docker container "
                 f"'{docker_container_name}'."
             )
+            # Ensure the working directory exists inside the container
+            if self.docker_workdir:
+                try:
+                    quoted_dir = shlex.quote(self.docker_workdir)
+                    mkdir_cmd = f'sh -lc "mkdir -p -- {quoted_dir}"'
+                    _init = self.docker_api_client.exec_create(
+                        self.container.id, mkdir_cmd
+                    )
+                    self.docker_api_client.exec_start(_init['Id'])
+                except Exception as e:
+                    logger.warning(
+                        f"[Docker] Failed to ensure workdir "
+                        f"'{self.docker_workdir}': {e}"
+                    )
         except NotFound:
             raise RuntimeError(
                 f"Docker container '{docker_container_name}' not found."
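The terminal-toolkit change makes sure the configured working directory exists inside an attached Docker container: the path is escaped with `shlex.quote` and created with `mkdir -p` through the Docker low-level API (`exec_create` / `exec_start`, as in the hunk above). A standalone illustration of why the quoting matters is below; the `workdir` value is a deliberately hostile example, not anything from the package.

```python
import shlex

# Example path containing spaces and a shell metacharacter.
workdir = "/data/my project; echo injected"
quoted = shlex.quote(workdir)
mkdir_cmd = f'sh -lc "mkdir -p -- {quoted}"'
print(mkdir_cmd)
# Without quoting, the ';' would end the mkdir command and the path would be
# split on whitespace; shlex.quote keeps it a single shell argument.
#
# In the toolkit this command string is then executed inside the container,
# roughly:
#   exec_info = docker_api_client.exec_create(container_id, mkdir_cmd)
#   docker_api_client.exec_start(exec_info["Id"])
```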
{camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-camel/__init__.py,sha256=
+camel/__init__.py,sha256=q3b2abfCj3uAj7Lc_RfTP2ckw3bMdzWeJKFIakJdNsI,901
 camel/generators.py,sha256=JRqj9_m1PF4qT6UtybzTQ-KBT9MJQt18OAAYvQ_fr2o,13844
 camel/human.py,sha256=Xg8x1cS5KK4bQ1SDByiHZnzsRpvRP-KZViNvmu38xo4,5475
 camel/logger.py,sha256=WgEwael_eT6D-lVAKHpKIpwXSTjvLbny5jbV1Ab8lnA,5760

@@ -182,7 +182,7 @@ camel/models/aiml_model.py,sha256=jmkQlqA7NkDXn2xN-98wtanI2KHbcMWQwNPjc38wyLI,35
 camel/models/amd_model.py,sha256=4LMjbRH-ghCdTapK9nhbgkOs9ZKLxQ9FXEg1esgmNZM,4124
 camel/models/anthropic_model.py,sha256=GlerIhIc7uGhzIsQoZw-_8CGOdcZT8DC_95V3hx3q-4,8350
 camel/models/aws_bedrock_model.py,sha256=0JdsLxfi-coI8LtSPNewsaeR43CwC0qG2Gm_iY-ZCdo,4073
-camel/models/azure_openai_model.py,sha256=
+camel/models/azure_openai_model.py,sha256=gQVSYuOR9ECv4UKiph_zM7QmLQ2FLqJ5MQAqLkPN2FQ,17474
 camel/models/base_audio_model.py,sha256=_VUWh1L3rh8mldNvM5R6jBOKtvmTeBKJyRxAdPJmPlY,3324
 camel/models/base_model.py,sha256=aty9oIZowjt5Mxi0aB7ifHUmQO-78UMfUFLPNm10lGg,20164
 camel/models/cohere_model.py,sha256=9H2F8bjwxPgwSwgPPRoOy090dQKBboQxnlk-94FoDIk,16719

@@ -207,7 +207,7 @@ camel/models/novita_model.py,sha256=9rmiAShSQOIxTzdhxZaI7Xw1ZQkYeQ-yiL1VmIWqsWc,
 camel/models/nvidia_model.py,sha256=C26XCUQRe2O7ySGIqTihySovDHRk19WN0GOsSHupYQE,3510
 camel/models/ollama_model.py,sha256=pz8mvrCw6dv4Kqfqrg89ZFI_4Y_ppRhvPXVcnkxYGlk,4198
 camel/models/openai_audio_models.py,sha256=BSixkXlc8xirQLl2qCla-g6_y9wDLnMZVHukHrhzw98,13344
-camel/models/openai_compatible_model.py,sha256=
+camel/models/openai_compatible_model.py,sha256=K-AqO3YulF1S5_Y4lmMS_kE26LnK6gm2zVDZiPNszj0,16537
 camel/models/openai_model.py,sha256=Y2vV4UlGk1yBx3q-JSmISO9kfausvqv-tH4awRBHR9E,19454
 camel/models/openrouter_model.py,sha256=V1DKBPoYJ6oCDDBqNK0RrK2JWq4oOWlbo3qu0lT4CbA,3514
 camel/models/ppio_model.py,sha256=G559Seb4KLU4FlYgSQ2UD_A6xOacX0eL6DrE_Hhmpv0,3645

@@ -223,7 +223,7 @@ camel/models/vllm_model.py,sha256=PwotGvmYo0zNsY5WVfd1R8IgCt2oQHO7ccmnlqVyDCs,42
 camel/models/volcano_model.py,sha256=joWFRGbsEpHFzRYRF7nv91mVADe-Y77NozEhfCnevSo,3375
 camel/models/watsonx_model.py,sha256=24sN5vrniwNI2mPjcByyF4tArFiwqo1FnyRQ1L5yPbE,10876
 camel/models/yi_model.py,sha256=exlJCQkUu2_Gx7qDwo3yBf6RrL6h8yMFRFMq6AXLwew,3506
-camel/models/zhipuai_model.py,sha256=
+camel/models/zhipuai_model.py,sha256=Og2eYOnpDlhE1U5njadRbJkxmN83XiRMTYIxN8mgr7M,5529
 camel/models/reward/__init__.py,sha256=MqPN6wXh7Y1SoeNoFlYaMG6xHzLG0CYsv_3kB2atIQk,984
 camel/models/reward/base_reward_model.py,sha256=erCmBCl51oFNjEHCXWxdHKIPNVJnQlNGgYBDn2bFD-Q,2064
 camel/models/reward/evaluator.py,sha256=54ev5MuQ_5Tp0-LGO59EIuIkGrVMbtXXqpBR5Ps9kCM,2426

@@ -452,7 +452,7 @@ camel/toolkits/open_api_specs/web_scraper/openapi.yaml,sha256=u_WalQ01e8W1D27VnZ
 camel/toolkits/open_api_specs/web_scraper/paths/__init__.py,sha256=OKCZrQCDwaWtXIN_2rA9FSqEvgpQRieRoHh7Ek6N16A,702
 camel/toolkits/open_api_specs/web_scraper/paths/scraper.py,sha256=aWy1_ppV4NVVEZfnbN3tu9XA9yAPAC9bRStJ5JuXMRU,1117
 camel/toolkits/terminal_toolkit/__init__.py,sha256=yE66haKm-NwoNJwtnCmcRpANtWxQB_dZoD8O5iqXt_Y,786
-camel/toolkits/terminal_toolkit/terminal_toolkit.py,sha256=
+camel/toolkits/terminal_toolkit/terminal_toolkit.py,sha256=Mp7C41d_T3guUNU7iXXOFKLNopTUyNGckzQ0JDCQ9og,36784
 camel/toolkits/terminal_toolkit/utils.py,sha256=N0Dz5EEoJRfkP_LEVXepP5XQB57z8Hqt30Xt6SkGgOg,19221
 camel/types/__init__.py,sha256=EOmWlqS7aE5cB51_Vv7vHUexKeBbx9FSsfynl5vKjwo,2565
 camel/types/enums.py,sha256=nnI2St4xY57T90ges9MAzuB75geAwo-kRcspGMssZkE,70287

@@ -485,7 +485,7 @@ camel/verifiers/math_verifier.py,sha256=tA1D4S0sm8nsWISevxSN0hvSVtIUpqmJhzqfbuMo
 camel/verifiers/models.py,sha256=GdxYPr7UxNrR1577yW4kyroRcLGfd-H1GXgv8potDWU,2471
 camel/verifiers/physics_verifier.py,sha256=c1grrRddcrVN7szkxhv2QirwY9viIRSITWeWFF5HmLs,30187
 camel/verifiers/python_verifier.py,sha256=ogTz77wODfEcDN4tMVtiSkRQyoiZbHPY2fKybn59lHw,20558
-camel_ai-0.2.
-camel_ai-0.2.
-camel_ai-0.2.
-camel_ai-0.2.
+camel_ai-0.2.76a6.dist-info/METADATA,sha256=2_ySY5FlyudCStDAMBurKcM-VXSgL6OrwWMApda6o2k,55432
+camel_ai-0.2.76a6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+camel_ai-0.2.76a6.dist-info/licenses/LICENSE,sha256=id0nB2my5kG0xXeimIu5zZrbHLS6EQvxvkKkzIHaT2k,11343
+camel_ai-0.2.76a6.dist-info/RECORD,,

{camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/WHEEL
File without changes

{camel_ai-0.2.76a5.dist-info → camel_ai-0.2.76a6.dist-info}/licenses/LICENSE
File without changes
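The RECORD hunks only swap the hash and size entries for files rebuilt in this release. For reference, the `sha256=` value on a wheel RECORD line is the urlsafe-base64 SHA-256 digest of the file with trailing `=` padding stripped, followed by the file size in bytes. A small sketch of computing one such entry (the file name used is only an example):

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: str) -> str:
    """Build one RECORD line: path, urlsafe-b64 sha256 (no padding), size."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest())
    return f"{path},sha256={digest.rstrip(b'=').decode()},{len(data)}"


# Works with any local file.
if Path("pyproject.toml").exists():
    print(record_entry("pyproject.toml"))
```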