camel-ai 0.2.49__py3-none-any.whl → 0.2.51__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of camel-ai might be problematic.
- camel/__init__.py +1 -1
- camel/agents/chat_agent.py +159 -15
- camel/configs/__init__.py +6 -0
- camel/configs/modelscope_config.py +4 -1
- camel/configs/novita_config.py +102 -0
- camel/configs/qwen_config.py +0 -7
- camel/configs/watsonx_config.py +96 -0
- camel/environments/single_step.py +79 -11
- camel/models/__init__.py +4 -0
- camel/models/azure_openai_model.py +27 -9
- camel/models/model_factory.py +29 -6
- camel/models/modelscope_model.py +175 -2
- camel/models/novita_model.py +95 -0
- camel/models/ollama_model.py +15 -10
- camel/models/qwen_model.py +175 -2
- camel/models/vllm_model.py +15 -9
- camel/models/watsonx_model.py +253 -0
- camel/societies/workforce/prompts.py +31 -4
- camel/societies/workforce/workforce.py +1 -1
- camel/toolkits/browser_toolkit.py +53 -55
- camel/types/enums.py +226 -1
- camel/types/unified_model_type.py +10 -0
- camel/utils/__init__.py +2 -0
- camel/utils/filename.py +80 -0
- camel/verifiers/__init__.py +2 -0
- camel/verifiers/physics_verifier.py +881 -0
- camel/verifiers/python_verifier.py +16 -31
- {camel_ai-0.2.49.dist-info → camel_ai-0.2.51.dist-info}/METADATA +4 -1
- {camel_ai-0.2.49.dist-info → camel_ai-0.2.51.dist-info}/RECORD +31 -25
- {camel_ai-0.2.49.dist-info → camel_ai-0.2.51.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.49.dist-info → camel_ai-0.2.51.dist-info}/licenses/LICENSE +0 -0
camel/toolkits/browser_toolkit.py
CHANGED

@@ -47,7 +47,11 @@ from camel.toolkits.base import BaseToolkit
 from camel.toolkits.function_tool import FunctionTool
 from camel.toolkits.video_analysis_toolkit import VideoAnalysisToolkit
 from camel.types import ModelPlatformType, ModelType
-from camel.utils import
+from camel.utils import (
+    dependencies_required,
+    retry_on_error,
+    sanitize_filename,
+)

 logger = get_logger(__name__)


@@ -137,7 +141,7 @@ def _get_str(d: Any, k: str) -> str:
     if isinstance(val, str):
         return val
     raise TypeError(
-        f"Expected a string for key '{k}',
+        f"Expected a string for key '{k}', but got {type(val).__name__}"
     )


@@ -158,7 +162,7 @@ def _get_bool(d: Any, k: str) -> bool:
     if isinstance(val, bool):
         return val
     raise TypeError(
-        f"Expected a boolean for key '{k}',
+        f"Expected a boolean for key '{k}', but got {type(val).__name__}"
     )


@@ -219,7 +223,7 @@ def _parse_json_output(text: str) -> Dict[str, Any]:
         return {}


-def _reload_image(image: Image.Image):
+def _reload_image(image: Image.Image) -> Image.Image:
     buffer = io.BytesIO()
     image.save(buffer, format="PNG")
     buffer.seek(0)

@@ -432,6 +436,7 @@ class BaseBrowser:
         headless=True,
         cache_dir: Optional[str] = None,
         channel: Literal["chrome", "msedge", "chromium"] = "chromium",
+        cookie_json_path: Optional[str] = None,
     ):
         r"""Initialize the WebBrowser instance.


@@ -441,6 +446,10 @@ class BaseBrowser:
             channel (Literal["chrome", "msedge", "chromium"]): The browser
                 channel to use. Must be one of "chrome", "msedge", or
                 "chromium".
+            cookie_json_path (Optional[str]): Path to a JSON file containing
+                authentication cookies and browser storage state. If provided
+                and the file exists, the browser will load this state to maintain
+                authenticated sessions without requiring manual login.

         Returns:
             None

@@ -455,6 +464,7 @@ class BaseBrowser:
         self._ensure_browser_installed()
         self.playwright = sync_playwright().start()
         self.page_history: list = []  # stores the history of visited pages
+        self.cookie_json_path = cookie_json_path

         # Set the cache directory
         self.cache_dir = "tmp/" if cache_dir is None else cache_dir

@@ -479,8 +489,18 @@ class BaseBrowser:
         self.browser = self.playwright.chromium.launch(
             headless=self.headless, channel=self.channel
         )
-
-
+
+        # Check if cookie file exists before using it to maintain
+        # authenticated sessions. This prevents errors when the cookie file
+        # doesn't exist
+        if self.cookie_json_path and os.path.exists(self.cookie_json_path):
+            self.context = self.browser.new_context(
+                accept_downloads=True, storage_state=self.cookie_json_path
+            )
+        else:
+            self.context = self.browser.new_context(
+                accept_downloads=True,
+            )
         # Create a new page
         self.page = self.context.new_page()


@@ -550,12 +570,9 @@ class BaseBrowser:
         # Get url name to form a file name
         # Use urlparser for a safer extraction the url name
         parsed_url = urllib.parse.urlparse(self.page_url)
-
-
-
-            url_name = url_name.replace(char, "_")
-
-        # Get formatted time: mmddhhmmss
+        # Max length is set to 241 as there are 10 characters for the
+        # timestamp and 4 characters for the file extension:
+        url_name = sanitize_filename(str(parsed_url.path), max_length=241)
         timestamp = datetime.datetime.now().strftime("%m%d%H%M%S")
         file_path = os.path.join(
             self.cache_dir, f"{url_name}_{timestamp}.png"

@@ -663,24 +680,25 @@ class BaseBrowser:
             directory.

         Returns:
-            Tuple[Image.Image, str]: A tuple containing the screenshot image
-                and
+            Tuple[Image.Image, Union[str, None]]: A tuple containing the screenshot image
+                and an optional path to the image file if saved, otherwise
+                :obj:`None`.
         """

         self._wait_for_load()
         screenshot, _ = self.get_screenshot(save_image=False)
         rects = self.get_interactive_elements()

-        file_path = None
-        comp,
+        file_path: str | None = None
+        comp, _, _, _ = add_set_of_mark(
             screenshot,
             rects,  # type: ignore[arg-type]
         )
         if save_image:
             parsed_url = urllib.parse.urlparse(self.page_url)
-
-
-
+            # Max length is set to 241 as there are 10 characters for the
+            # timestamp and 4 characters for the file extension:
+            url_name = sanitize_filename(str(parsed_url.path), max_length=241)
             timestamp = datetime.datetime.now().strftime("%m%d%H%M%S")
             file_path = os.path.join(
                 self.cache_dir, f"{url_name}_{timestamp}.png"

@@ -991,6 +1009,7 @@ class BrowserToolkit(BaseToolkit):
         web_agent_model: Optional[BaseModelBackend] = None,
         planning_agent_model: Optional[BaseModelBackend] = None,
         output_language: str = "en",
+        cookie_json_path: Optional[str] = None,
     ):
         r"""Initialize the BrowserToolkit instance.


@@ -1008,10 +1027,18 @@ class BrowserToolkit(BaseToolkit):
                 backend for the planning agent.
             output_language (str): The language to use for output.
                 (default: :obj:`"en`")
+            cookie_json_path (Optional[str]): Path to a JSON file containing
+                authentication cookies and browser storage state. If provided
+                and the file exists, the browser will load this state to maintain
+                authenticated sessions without requiring manual login.
+                (default: :obj:`None`)
         """

         self.browser = BaseBrowser(
-            headless=headless,
+            headless=headless,
+            cache_dir=cache_dir,
+            channel=channel,
+            cookie_json_path=cookie_json_path,
         )

         self.history_window = history_window

@@ -1103,7 +1130,7 @@ Here are the current available browser functions you can use:

 Here are the latest {self.history_window} trajectory (at most) you have taken:
 <history>
-{self.history[-self.history_window:]}
+{self.history[-self.history_window :]}
 </history>

 Your output should be in json format, including the following fields:

@@ -1322,36 +1349,6 @@ Please find the final answer, or give valuable insights and founds (e.g. if prev
         resp = self.web_agent.step(message)
         return resp.msgs[0].content

-    def _make_reflection(self, task_prompt: str) -> str:
-        r"""Make a reflection about the current state and the task prompt."""
-
-        reflection_prompt = f"""
-Now we are working on a complex task that requires multi-step browser interaction. The task is: <task>{task_prompt}</task>
-To achieve this goal, we have made a series of observations, reasonings, and actions. We have also made a reflection on previous states.
-
-Here are the global available browser functions we can use:
-{AVAILABLE_ACTIONS_PROMPT}
-
-Here are the latest {self.history_window} trajectory (at most) we have taken:
-<history>{self.history[-self.history_window:]}</history>
-
-The image provided is the current state of the browser, where we have marked interactive elements.
-Please carefully examine the requirements of the task, and the current state of the browser, and then make reflections on the previous steps, thinking about whether they are helpful or not, and why, offering detailed feedback and suggestions for the next steps.
-Your output should be in json format, including the following fields:
-- `reflection`: The reflection about the previous steps, thinking about whether they are helpful or not, and why, offering detailed feedback.
-- `suggestion`: The suggestion for the next steps, offering detailed suggestions, including the common solutions to the overall task based on the current state of the browser.
-"""
-        som_image, _ = self.browser.get_som_screenshot()
-        img = _reload_image(som_image)
-
-        message = BaseMessage.make_user_message(
-            role_name='user', content=reflection_prompt, image_list=[img]
-        )
-
-        resp = self.web_agent.step(message)
-
-        return resp.msgs[0].content
-
     def _task_planning(self, task_prompt: str, start_url: str) -> str:
         r"""Plan the task based on the given task prompt."""


@@ -1396,7 +1393,7 @@ In order to solve the task, we made a detailed plan previously. Here is the deta
 <detailed plan>{detailed_plan}</detailed plan>

 According to the task above, we have made a series of observations, reasonings, and actions. Here are the latest {self.history_window} trajectory (at most) we have taken:
-<history>{self.history[-self.history_window:]}</history>
+<history>{self.history[-self.history_window :]}</history>

 However, the task is not completed yet. As the task is partially observable, we may need to replan the task based on the current state of the browser if necessary.
 Now please carefully examine the current task planning schema, and our history actions, and then judge whether the task needs to be fundamentally replanned. If so, please provide a detailed replanned schema (including the restated overall task).

@@ -1422,7 +1419,8 @@ Your output should be in json format, including the following fields:
     def browse_url(
         self, task_prompt: str, start_url: str, round_limit: int = 12
     ) -> str:
-        r"""A powerful toolkit which can simulate the browser interaction to
+        r"""A powerful toolkit which can simulate the browser interaction to
+        solve the task which needs multi-step actions.

         Args:
             task_prompt (str): The task prompt to solve.

@@ -1480,7 +1478,7 @@ Your output should be in json format, including the following fields:
             }
             self.history.append(trajectory_info)

-            #
+            # Replan the task if necessary
             if_need_replan, replanned_schema = self._task_replanning(
                 task_prompt, detailed_plan
             )

@@ -1491,7 +1489,7 @@ Your output should be in json format, including the following fields:
         if not task_completed:
             simulation_result = f"""
 The task is not completed within the round limit. Please check the last round {self.history_window} information to see if there is any useful information:
-<history>{self.history[-self.history_window:]}</history>
+<history>{self.history[-self.history_window :]}</history>
 """

         else:
camel/types/enums.py
CHANGED

@@ -105,6 +105,7 @@ class ModelType(UnifiedModelType, Enum):
     TOGETHER_LLAMA_4_SCOUT = "meta-llama/Llama-4-Scout-17B-16E-Instruct"

     # PPIO platform models support tool calling
+    PPIO_DEEPSEEK_PROVER_V2_671B = "deepseek/deepseek-prover-v2-671b"
     PPIO_DEEPSEEK_R1_TURBO = "deepseek/deepseek-r1-turbo"
     PPIO_DEEPSEEK_V3_TURBO = "deepseek/deepseek-v3-turbo"
     PPIO_DEEPSEEK_R1_COMMUNITY = "deepseek/deepseek-r1/community"

@@ -212,6 +213,10 @@ class ModelType(UnifiedModelType, Enum):
     QWEN_MAX = "qwen-max"
     QWEN_PLUS = "qwen-plus"
     QWEN_TURBO = "qwen-turbo"
+    QWEN_PLUS_LATEST = "qwen-plus-latest"
+    QWEN_PLUS_2025_04_28 = "qwen-plus-2025-04-28"
+    QWEN_TURBO_LATEST = "qwen-turbo-latest"
+    QWEN_TURBO_2025_04_28 = "qwen-turbo-2025-04-28"
     QWEN_LONG = "qwen-long"
     QWEN_VL_MAX = "qwen-vl-max"
     QWEN_VL_PLUS = "qwen-vl-plus"

@@ -270,6 +275,63 @@ class ModelType(UnifiedModelType, Enum):
     AIML_MIXTRAL_8X7B = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     AIML_MISTRAL_7B_INSTRUCT = "mistralai/Mistral-7B-Instruct-v0.1"

+    # Novita platform models support tool calling
+    NOVITA_LLAMA_4_MAVERICK_17B = (
+        "meta-llama/llama-4-maverick-17b-128e-instruct-fp8"
+    )
+    NOVITA_LLAMA_4_SCOUT_17B = "meta-llama/llama-4-scout-17b-16e-instruct"
+    NOVITA_DEEPSEEK_V3_0324 = "deepseek/deepseek-v3-0324"
+    NOVITA_QWEN_2_5_V1_72B = "qwen/qwen2.5-vl-72b-instruct"
+    NOVITA_DEEPSEEK_V3_TURBO = "deepseek/deepseek-v3-turbo"
+    NOVITA_DEEPSEEK_R1_TURBO = "deepseek/deepseek-r1-turbo"
+    NOVITA_GEMMA_3_27B_IT = "google/gemma-3-27b-it"
+    NOVITA_QWEN_32B = "qwen/qwq-32b"
+    NOVITA_L3_8B_STHENO_V3_2 = "Sao10K/L3-8B-Stheno-v3.2"
+    NOVITA_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b"
+    NOVITA_DEEPSEEK_R1_DISTILL_LLAMA_8B = (
+        "deepseek/deepseek-r1-distill-llama-8b"
+    )
+    NOVITA_DEEPSEEK_V3 = "deepseek/deepseek_v3"
+    NOVITA_LLAMA_3_1_8B = "meta-llama/llama-3.1-8b-instruct"
+    NOVITA_DEEPSEEK_R1_DISTILL_QWEN_14B = (
+        "deepseek/deepseek-r1-distill-qwen-14b"
+    )
+    NOVITA_LLAMA_3_3_70B = "meta-llama/llama-3.3-70b-instruct"
+    NOVITA_QWEN_2_5_72B = "qwen/qwen-2.5-72b-instruct"
+    NOVITA_MISTRAL_NEMO = "mistralai/mistral-nemo"
+    NOVITA_DEEPSEEK_R1_DISTILL_QWEN_32B = (
+        "deepseek/deepseek-r1-distill-qwen-32b"
+    )
+    NOVITA_LLAMA_3_8B = "meta-llama/llama-3-8b-instruct"
+    NOVITA_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b"
+    NOVITA_DEEPSEEK_R1_DISTILL_LLAMA_70B = (
+        "deepseek/deepseek-r1-distill-llama-70b"
+    )
+    NOVITA_LLAMA_3_1_70B = "meta-llama/llama-3.1-70b-instruct"
+    NOVITA_GEMMA_2_9B_IT = "google/gemma-2-9b-it"
+    NOVITA_MISTRAL_7B = "mistralai/mistral-7b-instruct"
+    NOVITA_LLAMA_3_70B = "meta-llama/llama-3-70b-instruct"
+    NOVITA_DEEPSEEK_R1 = "deepseek/deepseek-r1"
+    NOVITA_HERMES_2_PRO_LLAMA_3_8B = "nousresearch/hermes-2-pro-llama-3-8b"
+    NOVITA_L3_70B_EURYALE_V2_1 = "sao10k/l3-70b-euryale-v2.1"
+    NOVITA_DOLPHIN_MIXTRAL_8X22B = (
+        "cognitivecomputations/dolphin-mixtral-8x22b"
+    )
+    NOVITA_AIROBOROS_L2_70B = "jondurbin/airoboros-l2-70b"
+    NOVITA_MIDNIGHT_ROSE_70B = "sophosympatheia/midnight-rose-70b"
+    NOVITA_L3_8B_LUNARIS = "sao10k/l3-8b-lunaris"
+    NOVITA_GLM_4_9B_0414 = "thudm/glm-4-9b-0414"
+    NOVITA_GLM_Z1_9B_0414 = "thudm/glm-z1-9b-0414"
+    NOVITA_GLM_Z1_32B_0414 = "thudm/glm-z1-32b-0414"
+    NOVITA_GLM_4_32B_0414 = "thudm/glm-4-32b-0414"
+    NOVITA_GLM_Z1_RUMINATION_32B_0414 = "thudm/glm-z1-rumination-32b-0414"
+    NOVITA_QWEN_2_5_7B = "qwen/qwen2.5-7b-instruct"
+    NOVITA_LLAMA_3_2_1B = "meta-llama/llama-3.2-1b-instruct"
+    NOVITA_LLAMA_3_2_11B_VISION = "meta-llama/llama-3.2-11b-vision-instruct"
+    NOVITA_LLAMA_3_2_3B = "meta-llama/llama-3.2-3b-instruct"
+    NOVITA_LLAMA_3_1_8B_BF16 = "meta-llama/llama-3.1-8b-instruct-bf16"
+    NOVITA_L31_70B_EURYALE_V2_2 = "sao10k/l31-70b-euryale-v2.2"
+
     # ModelScope models support tool calling
     MODELSCOPE_QWEN_2_5_7B_INSTRUCT = "Qwen/Qwen2.5-7B-Instruct"
     MODELSCOPE_QWEN_2_5_14B_INSTRUCT = "Qwen/Qwen2.5-14B-Instruct"

@@ -278,6 +340,8 @@ class ModelType(UnifiedModelType, Enum):
     MODELSCOPE_QWEN_2_5_CODER_7B_INSTRUCT = "Qwen/Qwen2.5-Coder-7B-Instruct"
     MODELSCOPE_QWEN_2_5_CODER_14B_INSTRUCT = "Qwen/Qwen2.5-Coder-14B-Instruct"
     MODELSCOPE_QWEN_2_5_CODER_32B_INSTRUCT = "Qwen/Qwen2.5-Coder-32B-Instruct"
+    MODELSCOPE_QWEN_3_235B_A22B = "Qwen/Qwen3-235B-A22B"
+    MODELSCOPE_QWEN_3_32B = "Qwen/Qwen3-32B"
     MODELSCOPE_QWQ_32B = "Qwen/QwQ-32B"
     MODELSCOPE_QWQ_32B_PREVIEW = "Qwen/QwQ-32B-Preview"
     MODELSCOPE_LLAMA_3_1_8B_INSTRUCT = (

@@ -293,6 +357,22 @@ class ModelType(UnifiedModelType, Enum):
     MODELSCOPE_MINISTRAL_8B_INSTRUCT = "mistralai/Ministral-8B-Instruct-2410"
     MODELSCOPE_DEEPSEEK_V3_0324 = "deepseek-ai/DeepSeek-V3-0324"

+    # WatsonX models
+    WATSONX_GRANITE_3_8B_INSTRUCT = "ibm/granite-3-8b-instruct"
+    WATSONX_LLAMA_3_3_70B_INSTRUCT = "meta-llama/llama-3-3-70b-instruct"
+    WATSONX_LLAMA_3_2_1B_INSTRUCT = "meta-llama/llama-3-2-1b-instruct"
+    WATSONX_LLAMA_3_2_3B_INSTRUCT = "meta-llama/llama-3-2-3b-instruct"
+    WATSONX_LLAMA_3_2_11B_VISION_INSTRUCT = (
+        "meta-llama/llama-3-2-11b-vision-instruct"
+    )
+    WATSONX_LLAMA_3_2_90B_VISION_INSTRUCT = (
+        "meta-llama/llama-3-2-90b-vision-instruct"
+    )
+    WATSONX_LLAMA_GUARD_3_11B_VISION_INSTRUCT = (
+        "meta-llama/llama-guard-3-11b-vision-instruct"
+    )
+    WATSONX_MISTRAL_LARGE = "mistralai/mistral-large"
+
     def __str__(self):
         return self.value

@@ -345,6 +425,7 @@ class ModelType(UnifiedModelType, Enum):
             self.is_zhipuai,
             self.is_aiml,
             self.is_azure_openai,
+            self.is_novita,
         ]
     )

@@ -609,6 +690,10 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.QWEN_QWQ_32B,
             ModelType.QWEN_QVQ_72B,
             ModelType.QWEN_QWQ_PLUS,
+            ModelType.QWEN_PLUS_LATEST,
+            ModelType.QWEN_PLUS_2025_04_28,
+            ModelType.QWEN_TURBO_LATEST,
+            ModelType.QWEN_TURBO_2025_04_28,
         }

     @property

@@ -632,6 +717,7 @@ class ModelType(UnifiedModelType, Enum):
     @property
     def is_ppio(self) -> bool:
         return self in {
+            ModelType.PPIO_DEEPSEEK_PROVER_V2_671B,
             ModelType.PPIO_DEEPSEEK_R1_TURBO,
             ModelType.PPIO_DEEPSEEK_V3_TURBO,
             ModelType.PPIO_DEEPSEEK_R1_COMMUNITY,

@@ -664,6 +750,8 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.MODELSCOPE_QWEN_2_5_CODER_7B_INSTRUCT,
             ModelType.MODELSCOPE_QWEN_2_5_CODER_14B_INSTRUCT,
             ModelType.MODELSCOPE_QWEN_2_5_CODER_32B_INSTRUCT,
+            ModelType.MODELSCOPE_QWEN_3_235B_A22B,
+            ModelType.MODELSCOPE_QWEN_3_32B,
             ModelType.MODELSCOPE_QWQ_32B,
             ModelType.MODELSCOPE_QWQ_32B_PREVIEW,
             ModelType.MODELSCOPE_LLAMA_3_1_8B_INSTRUCT,

@@ -713,6 +801,67 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.SILICONFLOW_PRO_THUDM_GLM_4_9B_CHAT,
         }

+    @property
+    def is_watsonx(self) -> bool:
+        return self in {
+            ModelType.WATSONX_GRANITE_3_8B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_3_70B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_1B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_3B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_11B_VISION_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_90B_VISION_INSTRUCT,
+            ModelType.WATSONX_LLAMA_GUARD_3_11B_VISION_INSTRUCT,
+            ModelType.WATSONX_MISTRAL_LARGE,
+        }
+
+    @property
+    def is_novita(self) -> bool:
+        return self in {
+            ModelType.NOVITA_LLAMA_4_MAVERICK_17B,
+            ModelType.NOVITA_LLAMA_4_SCOUT_17B,
+            ModelType.NOVITA_DEEPSEEK_V3_0324,
+            ModelType.NOVITA_QWEN_2_5_V1_72B,
+            ModelType.NOVITA_DEEPSEEK_V3_TURBO,
+            ModelType.NOVITA_DEEPSEEK_R1_TURBO,
+            ModelType.NOVITA_GEMMA_3_27B_IT,
+            ModelType.NOVITA_QWEN_32B,
+            ModelType.NOVITA_L3_8B_STHENO_V3_2,
+            ModelType.NOVITA_MYTHOMAX_L2_13B,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_LLAMA_8B,
+            ModelType.NOVITA_DEEPSEEK_V3,
+            ModelType.NOVITA_LLAMA_3_1_8B,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_QWEN_14B,
+            ModelType.NOVITA_LLAMA_3_3_70B,
+            ModelType.NOVITA_QWEN_2_5_72B,
+            ModelType.NOVITA_MISTRAL_NEMO,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_QWEN_32B,
+            ModelType.NOVITA_LLAMA_3_8B,
+            ModelType.NOVITA_WIZARDLM_2_8X22B,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_LLAMA_70B,
+            ModelType.NOVITA_LLAMA_3_1_70B,
+            ModelType.NOVITA_GEMMA_2_9B_IT,
+            ModelType.NOVITA_MISTRAL_7B,
+            ModelType.NOVITA_LLAMA_3_70B,
+            ModelType.NOVITA_DEEPSEEK_R1,
+            ModelType.NOVITA_HERMES_2_PRO_LLAMA_3_8B,
+            ModelType.NOVITA_L3_70B_EURYALE_V2_1,
+            ModelType.NOVITA_DOLPHIN_MIXTRAL_8X22B,
+            ModelType.NOVITA_AIROBOROS_L2_70B,
+            ModelType.NOVITA_MIDNIGHT_ROSE_70B,
+            ModelType.NOVITA_L3_8B_LUNARIS,
+            ModelType.NOVITA_GLM_4_9B_0414,
+            ModelType.NOVITA_GLM_Z1_9B_0414,
+            ModelType.NOVITA_GLM_Z1_32B_0414,
+            ModelType.NOVITA_GLM_4_32B_0414,
+            ModelType.NOVITA_GLM_Z1_RUMINATION_32B_0414,
+            ModelType.NOVITA_QWEN_2_5_7B,
+            ModelType.NOVITA_LLAMA_3_2_1B,
+            ModelType.NOVITA_LLAMA_3_2_11B_VISION,
+            ModelType.NOVITA_LLAMA_3_2_3B,
+            ModelType.NOVITA_LLAMA_3_1_8B_BF16,
+            ModelType.NOVITA_L31_70B_EURYALE_V2_2,
+        }
+
     @property
     def is_aiml(self) -> bool:
         return self in {

@@ -740,6 +889,9 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.COHERE_COMMAND_LIGHT,
             ModelType.NVIDIA_NEMOTRON_340B_INSTRUCT,
             ModelType.NVIDIA_NEMOTRON_340B_REWARD,
+            ModelType.NOVITA_MYTHOMAX_L2_13B,
+            ModelType.NOVITA_AIROBOROS_L2_70B,
+            ModelType.NOVITA_MIDNIGHT_ROSE_70B,
         }:
             return 4_096
         elif self in {

@@ -761,12 +913,27 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.LMSTUDIO_GEMMA_3_4B,
             ModelType.LMSTUDIO_GEMMA_3_12B,
             ModelType.LMSTUDIO_GEMMA_3_27B,
+            ModelType.WATSONX_GRANITE_3_8B_INSTRUCT,
+            ModelType.NOVITA_L3_8B_STHENO_V3_2,
+            ModelType.NOVITA_LLAMA_3_8B,
+            ModelType.NOVITA_GEMMA_2_9B_IT,
+            ModelType.NOVITA_LLAMA_3_70B,
+            ModelType.NOVITA_HERMES_2_PRO_LLAMA_3_8B,
+            ModelType.NOVITA_L3_70B_EURYALE_V2_1,
+            ModelType.NOVITA_L3_8B_LUNARIS,
+            ModelType.NOVITA_LLAMA_3_1_8B_BF16,
+            ModelType.NOVITA_L31_70B_EURYALE_V2_2,
         }:
             return 8_192
         elif self in {
             ModelType.PPIO_BAICHUAN_2_13B_CHAT,
         }:
             return 14_336
+        elif self in {
+            ModelType.PPIO_DEEPSEEK_PROVER_V2_671B,
+            ModelType.NOVITA_DOLPHIN_MIXTRAL_8X22B,
+        }:
+            return 16_000
         elif self in {
             ModelType.GPT_3_5_TURBO,
             ModelType.YI_LIGHTNING,

@@ -780,11 +947,21 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.GLM_4V_PLUS_0111,
             ModelType.GLM_ZERO_PREVIEW,
             ModelType.PPIO_YI_1_5_34B_CHAT,
+            ModelType.NOVITA_LLAMA_3_1_8B,
         }:
             return 16_384
-
         elif self in {
             ModelType.NETMIND_DOUBAO_1_5_PRO,
+            ModelType.NOVITA_GEMMA_3_27B_IT,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_LLAMA_8B,
+            ModelType.NOVITA_QWEN_2_5_72B,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_LLAMA_70B,
+            ModelType.NOVITA_GLM_4_9B_0414,
+            ModelType.NOVITA_GLM_Z1_9B_0414,
+            ModelType.NOVITA_GLM_Z1_32B_0414,
+            ModelType.NOVITA_GLM_4_32B_0414,
+            ModelType.NOVITA_GLM_Z1_RUMINATION_32B_0414,
+            ModelType.NOVITA_QWEN_2_5_7B,
         }:
             return 32_000
         elif self in {

@@ -820,6 +997,8 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.MODELSCOPE_QWEN_2_5_CODER_7B_INSTRUCT,
             ModelType.MODELSCOPE_QWEN_2_5_CODER_14B_INSTRUCT,
             ModelType.MODELSCOPE_QWEN_2_5_CODER_32B_INSTRUCT,
+            ModelType.MODELSCOPE_QWEN_3_235B_A22B,
+            ModelType.MODELSCOPE_QWEN_3_32B,
             ModelType.MODELSCOPE_QWQ_32B,
             ModelType.MODELSCOPE_QWQ_32B_PREVIEW,
             ModelType.MODELSCOPE_LLAMA_3_1_8B_INSTRUCT,

@@ -829,6 +1008,12 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.MODELSCOPE_MINISTRAL_8B_INSTRUCT,
             ModelType.MODELSCOPE_DEEPSEEK_V3_0324,
             ModelType.OPENROUTER_LLAMA_3_1_405B,
+            ModelType.WATSONX_MISTRAL_LARGE,
+            ModelType.NOVITA_QWEN_32B,
+            ModelType.NOVITA_LLAMA_3_1_70B,
+            ModelType.NOVITA_MISTRAL_7B,
+            ModelType.NOVITA_LLAMA_3_2_11B_VISION,
+            ModelType.NOVITA_LLAMA_3_2_3B,
         }:
             return 32_768
         elif self in {

@@ -843,8 +1028,22 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.PPIO_DEEPSEEK_V3,
             ModelType.AWS_DEEPSEEK_R1,
             ModelType.NETMIND_QWQ_32B,
+            ModelType.NOVITA_DEEPSEEK_V3_TURBO,
+            ModelType.NOVITA_DEEPSEEK_R1_TURBO,
+            ModelType.NOVITA_DEEPSEEK_V3,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_QWEN_14B,
+            ModelType.NOVITA_DEEPSEEK_R1_DISTILL_QWEN_32B,
+            ModelType.NOVITA_DEEPSEEK_R1,
         }:
             return 64_000
+        elif self in {
+            ModelType.NOVITA_WIZARDLM_2_8X22B,
+        }:
+            return 65_535
+        elif self in {
+            ModelType.NOVITA_QWEN_2_5_V1_72B,
+        }:
+            return 96_000
         elif self in {
             ModelType.CLAUDE_2_0,
             ModelType.CLAUDE_INSTANT_1_2,

@@ -894,13 +1093,22 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.AWS_LLAMA_3_2_11B_INSTRUCT,
             ModelType.NETMIND_DEEPSEEK_R1,
             ModelType.NETMIND_DEEPSEEK_V3,
+            ModelType.NOVITA_DEEPSEEK_V3_0324,
         }:
             return 128_000
+        elif self in {
+            ModelType.NOVITA_LLAMA_3_2_1B,
+        }:
+            return 131_000
         elif self in {
             ModelType.GROQ_LLAMA_3_1_8B,
             ModelType.QWEN_PLUS,
             ModelType.QWEN_TURBO,
             ModelType.QWEN_CODER_TURBO,
+            ModelType.QWEN_PLUS_LATEST,
+            ModelType.QWEN_PLUS_2025_04_28,
+            ModelType.QWEN_TURBO_LATEST,
+            ModelType.QWEN_TURBO_2025_04_28,
             ModelType.TOGETHER_LLAMA_3_1_8B,
             ModelType.TOGETHER_LLAMA_3_1_70B,
             ModelType.TOGETHER_LLAMA_3_1_405B,

@@ -911,6 +1119,15 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.OPENROUTER_LLAMA_3_1_70B,
             ModelType.PPIO_LLAMA_3_3_70B,
             ModelType.OPENROUTER_LLAMA_4_SCOUT,
+            ModelType.WATSONX_LLAMA_3_3_70B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_1B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_3B_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_11B_VISION_INSTRUCT,
+            ModelType.WATSONX_LLAMA_3_2_90B_VISION_INSTRUCT,
+            ModelType.WATSONX_LLAMA_GUARD_3_11B_VISION_INSTRUCT,
+            ModelType.NOVITA_LLAMA_4_SCOUT_17B,
+            ModelType.NOVITA_LLAMA_3_3_70B,
+            ModelType.NOVITA_MISTRAL_NEMO,
         }:
             return 131_072
         elif self in {

@@ -961,6 +1178,7 @@ class ModelType(UnifiedModelType, Enum):
             ModelType.GPT_4_1,
             ModelType.GPT_4_1_MINI,
             ModelType.GPT_4_1_NANO,
+            ModelType.NOVITA_LLAMA_4_MAVERICK_17B,
         }:
             return 1_048_576
         elif self in {

@@ -1159,6 +1377,8 @@ class ModelPlatformType(Enum):
     AIML = "aiml"
     VOLCANO = "volcano"
     NETMIND = "netmind"
+    NOVITA = "novita"
+    WATSONX = "watsonx"

     @classmethod
     def from_name(cls, name):

@@ -1324,6 +1544,11 @@ class ModelPlatformType(Enum):
         r"""Returns whether this platform is volcano."""
         return self is ModelPlatformType.VOLCANO

+    @property
+    def is_novita(self) -> bool:
+        r"""Returns whether this platform is Novita."""
+        return self is ModelPlatformType.NOVITA
+

 class AudioModelType(Enum):
     TTS_1 = "tts-1"
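
With `NOVITA` and `WATSONX` registered as platforms and the corresponding `ModelType` members and properties in place, a Novita-served model should be reachable through the usual `ModelFactory` path. A minimal sketch follows; it assumes `ModelFactory.create` wires the new platform to the `novita_model.py` backend added in this release (not shown in this excerpt) and that authentication follows the usual environment-variable convention for OpenAI-compatible platforms:

```python
from camel.models import ModelFactory
from camel.types import ModelPlatformType, ModelType

# Assumes the Novita API key is provided via the environment; the
# factory-to-backend wiring lives in model_factory.py / novita_model.py,
# which this release adds but which are not displayed in this excerpt.
model = ModelFactory.create(
    model_platform=ModelPlatformType.NOVITA,
    model_type=ModelType.NOVITA_DEEPSEEK_R1,
    model_config_dict={"temperature": 0.2},
)

# The enum changes shown above can be checked directly.
assert ModelType.NOVITA_DEEPSEEK_R1.is_novita
assert ModelType.NOVITA_DEEPSEEK_R1.token_limit == 64_000
assert ModelPlatformType.NOVITA.is_novita
```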
camel/types/unified_model_type.py
CHANGED

@@ -153,6 +153,16 @@ class UnifiedModelType(str):
         r"""Returns whether this platform is Moonshot model."""
         return True

+    @property
+    def is_novita(self) -> bool:
+        r"""Returns whether the model is a Novita served model."""
+        return True
+
+    @property
+    def is_watsonx(self) -> bool:
+        r"""Returns whether the model is a WatsonX served model."""
+        return True
+
     @property
     def support_native_structured_output(self) -> bool:
         r"""Returns whether the model supports native structured output."""
camel/utils/__init__.py
CHANGED

@@ -41,6 +41,7 @@ from .commons import (
 )
 from .constants import Constants
 from .deduplication import DeduplicationResult, deduplicate_internally
+from .filename import sanitize_filename
 from .mcp import MCPServer
 from .response_format import get_pydantic_model
 from .token_counting import (

@@ -90,4 +91,5 @@ __all__ = [
     "BatchProcessor",
     "with_timeout",
     "MCPServer",
+    "sanitize_filename",
 ]