camel-ai 0.2.65__py3-none-any.whl → 0.2.82__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- camel/__init__.py +3 -3
- camel/agents/__init__.py +2 -2
- camel/agents/_types.py +9 -4
- camel/agents/_utils.py +40 -2
- camel/agents/base.py +2 -2
- camel/agents/chat_agent.py +4835 -947
- camel/agents/critic_agent.py +2 -2
- camel/agents/deductive_reasoner_agent.py +56 -56
- camel/agents/embodied_agent.py +2 -2
- camel/agents/knowledge_graph_agent.py +20 -20
- camel/agents/mcp_agent.py +35 -36
- camel/agents/multi_hop_generator_agent.py +3 -3
- camel/agents/programmed_agent_instruction.py +2 -2
- camel/agents/repo_agent.py +4 -3
- camel/agents/role_assignment_agent.py +2 -2
- camel/agents/search_agent.py +2 -2
- camel/agents/task_agent.py +2 -2
- camel/agents/tool_agents/__init__.py +2 -2
- camel/agents/tool_agents/base.py +2 -2
- camel/agents/tool_agents/hugging_face_tool_agent.py +3 -3
- camel/benchmarks/__init__.py +2 -2
- camel/benchmarks/apibank.py +5 -5
- camel/benchmarks/apibench.py +2 -2
- camel/benchmarks/base.py +2 -2
- camel/benchmarks/browsecomp.py +44 -33
- camel/benchmarks/gaia.py +17 -13
- camel/benchmarks/mock_website/README.md +1 -3
- camel/benchmarks/mock_website/mock_web.py +2 -2
- camel/benchmarks/mock_website/requirements.txt +1 -1
- camel/benchmarks/mock_website/shopping_mall/app.py +2 -2
- camel/benchmarks/mock_website/task.json +1 -1
- camel/benchmarks/nexus.py +3 -3
- camel/benchmarks/ragbench.py +2 -2
- camel/bots/__init__.py +2 -2
- camel/bots/discord/__init__.py +2 -2
- camel/bots/discord/discord_app.py +2 -2
- camel/bots/discord/discord_installation.py +2 -2
- camel/bots/discord/discord_store.py +3 -3
- camel/bots/slack/__init__.py +2 -2
- camel/bots/slack/models.py +4 -4
- camel/bots/slack/slack_app.py +2 -2
- camel/bots/telegram_bot.py +2 -2
- camel/configs/__init__.py +23 -2
- camel/configs/aihubmix_config.py +90 -0
- camel/configs/aiml_config.py +2 -2
- camel/configs/amd_config.py +70 -0
- camel/configs/anthropic_config.py +2 -2
- camel/configs/base_config.py +2 -2
- camel/configs/bedrock_config.py +5 -3
- camel/configs/cerebras_config.py +98 -0
- camel/configs/cohere_config.py +2 -2
- camel/configs/cometapi_config.py +106 -0
- camel/configs/crynux_config.py +2 -2
- camel/configs/deepseek_config.py +9 -8
- camel/configs/gemini_config.py +6 -4
- camel/configs/groq_config.py +6 -4
- camel/configs/internlm_config.py +6 -4
- camel/configs/litellm_config.py +2 -2
- camel/configs/lmstudio_config.py +6 -4
- camel/configs/minimax_config.py +95 -0
- camel/configs/mistral_config.py +2 -2
- camel/configs/modelscope_config.py +5 -3
- camel/configs/moonshot_config.py +2 -2
- camel/configs/nebius_config.py +105 -0
- camel/configs/netmind_config.py +2 -2
- camel/configs/novita_config.py +2 -2
- camel/configs/nvidia_config.py +2 -2
- camel/configs/ollama_config.py +2 -2
- camel/configs/openai_config.py +5 -3
- camel/configs/openrouter_config.py +6 -4
- camel/configs/ppio_config.py +2 -2
- camel/configs/qianfan_config.py +85 -0
- camel/configs/qwen_config.py +2 -2
- camel/configs/reka_config.py +2 -2
- camel/configs/samba_config.py +6 -4
- camel/configs/sglang_config.py +2 -2
- camel/configs/siliconflow_config.py +2 -2
- camel/configs/togetherai_config.py +2 -2
- camel/configs/vllm_config.py +4 -2
- camel/configs/watsonx_config.py +2 -2
- camel/configs/yi_config.py +6 -4
- camel/configs/zhipuai_config.py +6 -4
- camel/data_collectors/__init__.py +2 -2
- camel/data_collectors/alpaca_collector.py +18 -9
- camel/data_collectors/base.py +2 -2
- camel/data_collectors/sharegpt_collector.py +2 -2
- camel/datagen/__init__.py +2 -2
- camel/datagen/cot_datagen.py +3 -3
- camel/datagen/evol_instruct/__init__.py +2 -2
- camel/datagen/evol_instruct/evol_instruct.py +2 -2
- camel/datagen/evol_instruct/scorer.py +12 -12
- camel/datagen/evol_instruct/templates.py +16 -16
- camel/datagen/self_improving_cot.py +5 -5
- camel/datagen/self_instruct/__init__.py +2 -2
- camel/datagen/self_instruct/filter/__init__.py +2 -2
- camel/datagen/self_instruct/filter/filter_function.py +2 -2
- camel/datagen/self_instruct/filter/filter_registry.py +2 -2
- camel/datagen/self_instruct/filter/instruction_filter.py +2 -2
- camel/datagen/self_instruct/self_instruct.py +2 -2
- camel/datagen/self_instruct/templates.py +47 -47
- camel/datagen/source2synth/__init__.py +2 -2
- camel/datagen/source2synth/data_processor.py +2 -2
- camel/datagen/source2synth/models.py +2 -2
- camel/datagen/source2synth/user_data_processor_config.py +2 -2
- camel/datahubs/__init__.py +2 -2
- camel/datahubs/base.py +2 -2
- camel/datahubs/huggingface.py +2 -2
- camel/datahubs/models.py +2 -2
- camel/datasets/__init__.py +2 -2
- camel/datasets/base_generator.py +41 -12
- camel/datasets/few_shot_generator.py +18 -18
- camel/datasets/models.py +2 -2
- camel/datasets/self_instruct_generator.py +2 -2
- camel/datasets/static_dataset.py +2 -2
- camel/embeddings/__init__.py +2 -2
- camel/embeddings/azure_embedding.py +2 -2
- camel/embeddings/base.py +2 -2
- camel/embeddings/gemini_embedding.py +2 -2
- camel/embeddings/jina_embedding.py +2 -2
- camel/embeddings/mistral_embedding.py +2 -2
- camel/embeddings/openai_compatible_embedding.py +2 -2
- camel/embeddings/openai_embedding.py +2 -2
- camel/embeddings/sentence_transformers_embeddings.py +2 -2
- camel/embeddings/together_embedding.py +2 -2
- camel/embeddings/vlm_embedding.py +2 -2
- camel/environments/__init__.py +14 -2
- camel/environments/models.py +2 -2
- camel/environments/multi_step.py +2 -2
- camel/environments/rlcards_env.py +860 -0
- camel/environments/single_step.py +30 -5
- camel/environments/tic_tac_toe.py +3 -3
- camel/extractors/__init__.py +2 -2
- camel/extractors/base.py +2 -2
- camel/extractors/python_strategies.py +2 -2
- camel/generators.py +2 -2
- camel/human.py +2 -2
- camel/interpreters/__init__.py +4 -2
- camel/interpreters/base.py +2 -2
- camel/interpreters/docker/Dockerfile +14 -24
- camel/interpreters/docker_interpreter.py +5 -4
- camel/interpreters/e2b_interpreter.py +36 -3
- camel/interpreters/internal_python_interpreter.py +53 -4
- camel/interpreters/interpreter_error.py +2 -2
- camel/interpreters/ipython_interpreter.py +2 -2
- camel/interpreters/microsandbox_interpreter.py +395 -0
- camel/interpreters/subprocess_interpreter.py +2 -2
- camel/loaders/__init__.py +13 -4
- camel/loaders/apify_reader.py +2 -2
- camel/loaders/base_io.py +2 -2
- camel/loaders/base_loader.py +85 -0
- camel/loaders/chunkr_reader.py +11 -2
- camel/loaders/crawl4ai_reader.py +2 -2
- camel/loaders/firecrawl_reader.py +6 -6
- camel/loaders/jina_url_reader.py +2 -2
- camel/loaders/markitdown.py +2 -2
- camel/loaders/mineru_extractor.py +2 -2
- camel/loaders/mistral_reader.py +2 -2
- camel/loaders/scrapegraph_reader.py +2 -2
- camel/loaders/unstructured_io.py +2 -2
- camel/logger.py +5 -5
- camel/memories/__init__.py +2 -2
- camel/memories/agent_memories.py +86 -3
- camel/memories/base.py +36 -2
- camel/memories/blocks/__init__.py +2 -2
- camel/memories/blocks/chat_history_block.py +125 -7
- camel/memories/blocks/vectordb_block.py +10 -3
- camel/memories/context_creators/__init__.py +2 -2
- camel/memories/context_creators/score_based.py +31 -239
- camel/memories/records.py +90 -10
- camel/messages/__init__.py +2 -2
- camel/messages/base.py +178 -43
- camel/messages/conversion/__init__.py +2 -2
- camel/messages/conversion/alpaca.py +2 -2
- camel/messages/conversion/conversation_models.py +2 -2
- camel/messages/conversion/sharegpt/__init__.py +2 -2
- camel/messages/conversion/sharegpt/function_call_formatter.py +2 -2
- camel/messages/conversion/sharegpt/hermes/__init__.py +2 -2
- camel/messages/conversion/sharegpt/hermes/hermes_function_formatter.py +2 -2
- camel/messages/func_message.py +54 -17
- camel/models/__init__.py +16 -2
- camel/models/_utils.py +3 -3
- camel/models/aihubmix_model.py +83 -0
- camel/models/aiml_model.py +11 -18
- camel/models/amd_model.py +101 -0
- camel/models/anthropic_model.py +127 -20
- camel/models/aws_bedrock_model.py +12 -35
- camel/models/azure_openai_model.py +212 -89
- camel/models/base_audio_model.py +5 -3
- camel/models/base_model.py +195 -26
- camel/models/cerebras_model.py +83 -0
- camel/models/cohere_model.py +16 -21
- camel/models/cometapi_model.py +83 -0
- camel/models/crynux_model.py +11 -18
- camel/models/deepseek_model.py +18 -58
- camel/models/fish_audio_model.py +8 -2
- camel/models/gemini_model.py +389 -26
- camel/models/groq_model.py +11 -19
- camel/models/internlm_model.py +11 -18
- camel/models/litellm_model.py +56 -34
- camel/models/lmstudio_model.py +17 -20
- camel/models/minimax_model.py +83 -0
- camel/models/mistral_model.py +18 -19
- camel/models/model_factory.py +37 -3
- camel/models/model_manager.py +26 -8
- camel/models/modelscope_model.py +13 -193
- camel/models/moonshot_model.py +195 -21
- camel/models/nebius_model.py +83 -0
- camel/models/nemotron_model.py +19 -9
- camel/models/netmind_model.py +11 -18
- camel/models/novita_model.py +11 -18
- camel/models/nvidia_model.py +11 -18
- camel/models/ollama_model.py +14 -21
- camel/models/openai_audio_models.py +2 -2
- camel/models/openai_compatible_model.py +188 -45
- camel/models/openai_model.py +216 -71
- camel/models/openrouter_model.py +11 -19
- camel/models/ppio_model.py +11 -18
- camel/models/qianfan_model.py +89 -0
- camel/models/qwen_model.py +13 -193
- camel/models/reka_model.py +21 -21
- camel/models/reward/__init__.py +2 -2
- camel/models/reward/base_reward_model.py +2 -2
- camel/models/reward/evaluator.py +2 -2
- camel/models/reward/nemotron_model.py +2 -2
- camel/models/reward/skywork_model.py +2 -2
- camel/models/samba_model.py +48 -47
- camel/models/sglang_model.py +88 -40
- camel/models/siliconflow_model.py +12 -35
- camel/models/stub_model.py +10 -7
- camel/models/togetherai_model.py +11 -18
- camel/models/vllm_model.py +10 -18
- camel/models/volcano_model.py +16 -20
- camel/models/watsonx_model.py +7 -19
- camel/models/yi_model.py +11 -18
- camel/models/zhipuai_model.py +70 -18
- camel/parsers/__init__.py +18 -0
- camel/parsers/mcp_tool_call_parser.py +176 -0
- camel/personas/__init__.py +2 -2
- camel/personas/persona.py +2 -2
- camel/personas/persona_hub.py +2 -2
- camel/prompts/__init__.py +2 -2
- camel/prompts/ai_society.py +2 -2
- camel/prompts/base.py +2 -2
- camel/prompts/code.py +2 -2
- camel/prompts/evaluation.py +2 -2
- camel/prompts/generate_text_embedding_data.py +2 -2
- camel/prompts/image_craft.py +2 -2
- camel/prompts/misalignment.py +2 -2
- camel/prompts/multi_condition_image_craft.py +2 -2
- camel/prompts/object_recognition.py +2 -2
- camel/prompts/persona_hub.py +3 -3
- camel/prompts/prompt_templates.py +2 -2
- camel/prompts/role_description_prompt_template.py +2 -2
- camel/prompts/solution_extraction.py +8 -8
- camel/prompts/task_prompt_template.py +2 -2
- camel/prompts/translation.py +2 -2
- camel/prompts/video_description_prompt.py +3 -3
- camel/responses/__init__.py +2 -2
- camel/responses/agent_responses.py +2 -2
- camel/retrievers/__init__.py +2 -2
- camel/retrievers/auto_retriever.py +3 -2
- camel/retrievers/base.py +2 -2
- camel/retrievers/bm25_retriever.py +2 -2
- camel/retrievers/cohere_rerank_retriever.py +2 -2
- camel/retrievers/hybrid_retrival.py +2 -2
- camel/retrievers/vector_retriever.py +2 -2
- camel/runtimes/Dockerfile.multi-toolkit +90 -0
- camel/runtimes/__init__.py +2 -2
- camel/runtimes/api.py +79 -23
- camel/runtimes/base.py +2 -2
- camel/runtimes/configs.py +13 -13
- camel/runtimes/daytona_runtime.py +17 -18
- camel/runtimes/docker_runtime.py +12 -12
- camel/runtimes/llm_guard_runtime.py +26 -26
- camel/runtimes/remote_http_runtime.py +11 -11
- camel/runtimes/ubuntu_docker_runtime.py +2 -2
- camel/runtimes/utils/__init__.py +2 -2
- camel/runtimes/utils/function_risk_toolkit.py +2 -2
- camel/runtimes/utils/ignore_risk_toolkit.py +2 -2
- camel/schemas/__init__.py +2 -2
- camel/schemas/base.py +2 -2
- camel/schemas/openai_converter.py +3 -3
- camel/schemas/outlines_converter.py +2 -2
- camel/services/agent_openapi_server.py +380 -0
- camel/societies/__init__.py +4 -2
- camel/societies/babyagi_playing.py +2 -2
- camel/societies/role_playing.py +201 -80
- camel/societies/workforce/__init__.py +10 -3
- camel/societies/workforce/base.py +2 -2
- camel/societies/workforce/events.py +143 -0
- camel/societies/workforce/prompts.py +258 -33
- camel/societies/workforce/role_playing_worker.py +88 -31
- camel/societies/workforce/single_agent_worker.py +638 -40
- camel/societies/workforce/structured_output_handler.py +512 -0
- camel/societies/workforce/task_channel.py +182 -38
- camel/societies/workforce/utils.py +780 -65
- camel/societies/workforce/worker.py +92 -26
- camel/societies/workforce/workflow_memory_manager.py +1746 -0
- camel/societies/workforce/workforce.py +5276 -355
- camel/societies/workforce/workforce_callback.py +103 -0
- camel/societies/workforce/workforce_logger.py +647 -0
- camel/societies/workforce/workforce_metrics.py +33 -0
- camel/storages/__init__.py +6 -2
- camel/storages/graph_storages/__init__.py +2 -2
- camel/storages/graph_storages/base.py +2 -2
- camel/storages/graph_storages/graph_element.py +2 -2
- camel/storages/graph_storages/nebula_graph.py +4 -4
- camel/storages/graph_storages/neo4j_graph.py +7 -7
- camel/storages/key_value_storages/__init__.py +2 -2
- camel/storages/key_value_storages/base.py +2 -2
- camel/storages/key_value_storages/in_memory.py +2 -2
- camel/storages/key_value_storages/json.py +17 -4
- camel/storages/key_value_storages/mem0_cloud.py +50 -49
- camel/storages/key_value_storages/redis.py +2 -2
- camel/storages/object_storages/__init__.py +2 -2
- camel/storages/object_storages/amazon_s3.py +2 -2
- camel/storages/object_storages/azure_blob.py +2 -2
- camel/storages/object_storages/base.py +2 -2
- camel/storages/object_storages/google_cloud.py +3 -3
- camel/storages/vectordb_storages/__init__.py +8 -2
- camel/storages/vectordb_storages/base.py +2 -2
- camel/storages/vectordb_storages/chroma.py +731 -0
- camel/storages/vectordb_storages/faiss.py +2 -2
- camel/storages/vectordb_storages/milvus.py +2 -2
- camel/storages/vectordb_storages/oceanbase.py +15 -15
- camel/storages/vectordb_storages/pgvector.py +349 -0
- camel/storages/vectordb_storages/qdrant.py +6 -6
- camel/storages/vectordb_storages/surreal.py +372 -0
- camel/storages/vectordb_storages/tidb.py +11 -8
- camel/storages/vectordb_storages/weaviate.py +2 -2
- camel/tasks/__init__.py +2 -2
- camel/tasks/task.py +348 -26
- camel/tasks/task_prompt.py +3 -3
- camel/terminators/__init__.py +2 -2
- camel/terminators/base.py +2 -2
- camel/terminators/response_terminator.py +2 -2
- camel/terminators/token_limit_terminator.py +2 -2
- camel/toolkits/__init__.py +54 -10
- camel/toolkits/aci_toolkit.py +66 -21
- camel/toolkits/arxiv_toolkit.py +8 -8
- camel/toolkits/ask_news_toolkit.py +2 -2
- camel/toolkits/async_browser_toolkit.py +4 -4
- camel/toolkits/audio_analysis_toolkit.py +3 -3
- camel/toolkits/base.py +65 -7
- camel/toolkits/bohrium_toolkit.py +2 -2
- camel/toolkits/browser_toolkit.py +34 -21
- camel/toolkits/browser_toolkit_commons.py +4 -4
- camel/toolkits/code_execution.py +31 -4
- camel/toolkits/context_summarizer_toolkit.py +684 -0
- camel/toolkits/craw4ai_toolkit.py +93 -0
- camel/toolkits/dappier_toolkit.py +12 -8
- camel/toolkits/data_commons_toolkit.py +2 -2
- camel/toolkits/dingtalk.py +1135 -0
- camel/toolkits/earth_science_toolkit.py +5367 -0
- camel/toolkits/edgeone_pages_mcp_toolkit.py +49 -0
- camel/toolkits/excel_toolkit.py +905 -71
- camel/toolkits/file_toolkit.py +1402 -0
- camel/toolkits/function_tool.py +126 -18
- camel/toolkits/github_toolkit.py +109 -22
- camel/toolkits/gmail_toolkit.py +1839 -0
- camel/toolkits/google_calendar_toolkit.py +40 -6
- camel/toolkits/google_drive_mcp_toolkit.py +54 -0
- camel/toolkits/google_maps_toolkit.py +2 -2
- camel/toolkits/google_scholar_toolkit.py +2 -2
- camel/toolkits/human_toolkit.py +36 -12
- camel/toolkits/hybrid_browser_toolkit/__init__.py +18 -0
- camel/toolkits/hybrid_browser_toolkit/config_loader.py +185 -0
- camel/toolkits/hybrid_browser_toolkit/hybrid_browser_toolkit.py +246 -0
- camel/toolkits/hybrid_browser_toolkit/hybrid_browser_toolkit_ts.py +1973 -0
- camel/toolkits/hybrid_browser_toolkit/installer.py +203 -0
- camel/toolkits/hybrid_browser_toolkit/ts/package-lock.json +4589 -0
- camel/toolkits/hybrid_browser_toolkit/ts/package.json +33 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/browser-scripts.js +125 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/browser-session.ts +1929 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/config-loader.ts +233 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/hybrid-browser-toolkit.ts +589 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/index.ts +7 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/parent-child-filter.ts +226 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/snapshot-parser.ts +219 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/som-screenshot-injected.ts +543 -0
- camel/toolkits/hybrid_browser_toolkit/ts/src/types.ts +129 -0
- camel/toolkits/hybrid_browser_toolkit/ts/tsconfig.json +27 -0
- camel/toolkits/hybrid_browser_toolkit/ts/websocket-server.js +319 -0
- camel/toolkits/hybrid_browser_toolkit/ws_wrapper.py +1037 -0
- camel/toolkits/hybrid_browser_toolkit_py/__init__.py +17 -0
- camel/toolkits/hybrid_browser_toolkit_py/actions.py +575 -0
- camel/toolkits/hybrid_browser_toolkit_py/agent.py +311 -0
- camel/toolkits/hybrid_browser_toolkit_py/browser_session.py +787 -0
- camel/toolkits/hybrid_browser_toolkit_py/config_loader.py +490 -0
- camel/toolkits/hybrid_browser_toolkit_py/hybrid_browser_toolkit.py +2390 -0
- camel/toolkits/hybrid_browser_toolkit_py/snapshot.py +233 -0
- camel/toolkits/hybrid_browser_toolkit_py/stealth_script.js +0 -0
- camel/toolkits/hybrid_browser_toolkit_py/unified_analyzer.js +1043 -0
- camel/toolkits/image_analysis_toolkit.py +3 -6
- camel/toolkits/image_generation_toolkit.py +390 -0
- camel/toolkits/jina_reranker_toolkit.py +5 -6
- camel/toolkits/klavis_toolkit.py +7 -3
- camel/toolkits/linkedin_toolkit.py +2 -2
- camel/toolkits/markitdown_toolkit.py +104 -0
- camel/toolkits/math_toolkit.py +66 -12
- camel/toolkits/mcp_toolkit.py +412 -36
- camel/toolkits/memory_toolkit.py +7 -3
- camel/toolkits/meshy_toolkit.py +2 -2
- camel/toolkits/message_agent_toolkit.py +608 -0
- camel/toolkits/message_integration.py +724 -0
- camel/toolkits/mineru_toolkit.py +2 -2
- camel/toolkits/minimax_mcp_toolkit.py +195 -0
- camel/toolkits/networkx_toolkit.py +2 -2
- camel/toolkits/note_taking_toolkit.py +277 -0
- camel/toolkits/notion_mcp_toolkit.py +224 -0
- camel/toolkits/notion_toolkit.py +2 -2
- camel/toolkits/open_api_specs/biztoc/__init__.py +2 -2
- camel/toolkits/open_api_specs/biztoc/ai-plugin.json +1 -1
- camel/toolkits/open_api_specs/coursera/__init__.py +2 -2
- camel/toolkits/open_api_specs/create_qr_code/__init__.py +2 -2
- camel/toolkits/open_api_specs/klarna/__init__.py +2 -2
- camel/toolkits/open_api_specs/nasa_apod/__init__.py +2 -2
- camel/toolkits/open_api_specs/outschool/__init__.py +2 -2
- camel/toolkits/open_api_specs/outschool/ai-plugin.json +1 -1
- camel/toolkits/open_api_specs/outschool/openapi.yaml +1 -1
- camel/toolkits/open_api_specs/outschool/paths/__init__.py +2 -2
- camel/toolkits/open_api_specs/outschool/paths/get_classes.py +2 -2
- camel/toolkits/open_api_specs/outschool/paths/search_teachers.py +2 -2
- camel/toolkits/open_api_specs/security_config.py +2 -2
- camel/toolkits/open_api_specs/speak/__init__.py +2 -2
- camel/toolkits/open_api_specs/web_scraper/__init__.py +2 -2
- camel/toolkits/open_api_specs/web_scraper/ai-plugin.json +1 -1
- camel/toolkits/open_api_specs/web_scraper/paths/__init__.py +2 -2
- camel/toolkits/open_api_specs/web_scraper/paths/scraper.py +2 -2
- camel/toolkits/open_api_toolkit.py +2 -2
- camel/toolkits/openbb_toolkit.py +7 -3
- camel/toolkits/origene_mcp_toolkit.py +56 -0
- camel/toolkits/page_script.js +53 -53
- camel/toolkits/playwright_mcp_toolkit.py +13 -31
- camel/toolkits/pptx_toolkit.py +36 -23
- camel/toolkits/pubmed_toolkit.py +2 -2
- camel/toolkits/pulse_mcp_search_toolkit.py +2 -2
- camel/toolkits/pyautogui_toolkit.py +2 -2
- camel/toolkits/reddit_toolkit.py +2 -2
- camel/toolkits/resend_toolkit.py +168 -0
- camel/toolkits/retrieval_toolkit.py +2 -2
- camel/toolkits/screenshot_toolkit.py +213 -0
- camel/toolkits/search_toolkit.py +539 -146
- camel/toolkits/searxng_toolkit.py +2 -2
- camel/toolkits/semantic_scholar_toolkit.py +2 -2
- camel/toolkits/slack_toolkit.py +108 -58
- camel/toolkits/sql_toolkit.py +712 -0
- camel/toolkits/stripe_toolkit.py +2 -2
- camel/toolkits/sympy_toolkit.py +3 -3
- camel/toolkits/task_planning_toolkit.py +5 -5
- camel/toolkits/terminal_toolkit/__init__.py +18 -0
- camel/toolkits/terminal_toolkit/terminal_toolkit.py +1070 -0
- camel/toolkits/terminal_toolkit/utils.py +532 -0
- camel/toolkits/thinking_toolkit.py +3 -3
- camel/toolkits/twitter_toolkit.py +2 -2
- camel/toolkits/vertex_ai_veo_toolkit.py +590 -0
- camel/toolkits/video_analysis_toolkit.py +109 -29
- camel/toolkits/video_download_toolkit.py +19 -16
- camel/toolkits/weather_toolkit.py +2 -2
- camel/toolkits/web_deploy_toolkit.py +1219 -0
- camel/toolkits/wechat_official_toolkit.py +483 -0
- camel/toolkits/whatsapp_toolkit.py +2 -2
- camel/toolkits/wolfram_alpha_toolkit.py +2 -2
- camel/toolkits/zapier_toolkit.py +7 -3
- camel/types/__init__.py +4 -4
- camel/types/agents/__init__.py +2 -2
- camel/types/agents/tool_calling_record.py +6 -3
- camel/types/enums.py +378 -39
- camel/types/mcp_registries.py +2 -2
- camel/types/openai_types.py +4 -4
- camel/types/unified_model_type.py +38 -6
- camel/utils/__init__.py +2 -2
- camel/utils/async_func.py +2 -2
- camel/utils/chunker/__init__.py +2 -2
- camel/utils/chunker/base.py +2 -2
- camel/utils/chunker/code_chunker.py +2 -2
- camel/utils/chunker/uio_chunker.py +2 -2
- camel/utils/commons.py +38 -7
- camel/utils/constants.py +5 -2
- camel/utils/context_utils.py +1134 -0
- camel/utils/deduplication.py +2 -2
- camel/utils/filename.py +2 -2
- camel/utils/langfuse.py +2 -2
- camel/utils/mcp.py +140 -6
- camel/utils/mcp_client.py +48 -38
- camel/utils/message_summarizer.py +148 -0
- camel/utils/response_format.py +2 -2
- camel/utils/token_counting.py +45 -22
- camel/utils/tool_result.py +44 -0
- camel/verifiers/__init__.py +2 -2
- camel/verifiers/base.py +2 -2
- camel/verifiers/math_verifier.py +2 -2
- camel/verifiers/models.py +2 -2
- camel/verifiers/physics_verifier.py +2 -2
- camel/verifiers/python_verifier.py +2 -2
- {camel_ai-0.2.65.dist-info → camel_ai-0.2.82.dist-info}/METADATA +327 -94
- camel_ai-0.2.82.dist-info/RECORD +507 -0
- {camel_ai-0.2.65.dist-info → camel_ai-0.2.82.dist-info}/WHEEL +1 -1
- {camel_ai-0.2.65.dist-info → camel_ai-0.2.82.dist-info}/licenses/LICENSE +1 -1
- camel/loaders/pandas_reader.py +0 -368
- camel/toolkits/dalle_toolkit.py +0 -175
- camel/toolkits/file_write_toolkit.py +0 -444
- camel/toolkits/openai_agent_toolkit.py +0 -135
- camel/toolkits/terminal_toolkit.py +0 -1037
- camel_ai-0.2.65.dist-info/RECORD +0 -426
camel/models/nemotron_model.py
CHANGED
@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,9 +10,9 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 import os
-from typing import Optional, Union
+from typing import Any, Optional, Union
 
 from camel.models.openai_compatible_model import OpenAICompatibleModel
 from camel.types import ModelType
@@ -36,6 +36,10 @@ class NemotronModel(OpenAICompatibleModel):
             API calls. If not provided, will fall back to the MODEL_TIMEOUT
             environment variable or default to 180 seconds.
             (default: :obj:`None`)
+        max_retries (int, optional): Maximum number of retries for API calls.
+            (default: :obj:`3`)
+        **kwargs (Any): Additional arguments to pass to the client
+            initialization.
 
     Notes:
         Nemotron model doesn't support additional model config like OpenAI.
@@ -52,21 +56,27 @@ class NemotronModel(OpenAICompatibleModel):
         api_key: Optional[str] = None,
         url: Optional[str] = None,
         timeout: Optional[float] = None,
+        max_retries: int = 3,
+        **kwargs: Any,
     ) -> None:
         url = url or os.environ.get(
             "NVIDIA_API_BASE_URL", "https://integrate.api.nvidia.com/v1"
         )
         api_key = api_key or os.environ.get("NVIDIA_API_KEY")
         timeout = timeout or float(os.environ.get("MODEL_TIMEOUT", 180))
-        super().__init__(model_type, {}, api_key, url, None, timeout)
+        super().__init__(
+            model_type,
+            {},
+            api_key,
+            url,
+            None,
+            timeout,
+            max_retries=max_retries,
+            **kwargs,
+        )
 
     @property
     def token_counter(self) -> BaseTokenCounter:
         raise NotImplementedError(
             "Nemotron model doesn't support token counter."
         )
-
-    def check_model_config(self):
-        raise NotImplementedError(
-            "Nemotron model doesn't support model config."
-        )

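The same two constructor additions recur in the backends that follow (NetmindModel, NovitaModel, NvidiaModel, OllamaModel): a `max_retries` argument plus pass-through `**kwargs`, both forwarded to the underlying OpenAI-compatible client, while the per-provider `check_model_config` overrides are removed. A minimal usage sketch for the Nemotron backend, assuming `NVIDIA_API_KEY` is set in the environment; the model id and extra header below are illustrative placeholders, not values taken from this diff:

```python
from camel.models.nemotron_model import NemotronModel

# max_retries and any extra keyword arguments now reach the OpenAI-compatible
# client that NemotronModel builds internally (see the super().__init__ call
# in the diff above).
model = NemotronModel(
    model_type="nvidia/nemotron-4-340b-reward",  # placeholder model id
    max_retries=5,                               # new in 0.2.82
    default_headers={"X-Demo": "1"},             # example **kwargs forwarded
                                                 # to client initialization
)
```
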
camel/models/netmind_model.py
CHANGED
@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,12 +10,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 
 import os
 from typing import Any, Dict, Optional, Union
 
-from camel.configs import NETMIND_API_PARAMS, NetmindConfig
+from camel.configs import NetmindConfig
 from camel.models.openai_compatible_model import OpenAICompatibleModel
 from camel.types import ModelType
 from camel.utils import (
@@ -47,6 +47,10 @@ class NetmindModel(OpenAICompatibleModel):
             API calls. If not provided, will fall back to the MODEL_TIMEOUT
             environment variable or default to 180 seconds.
             (default: :obj:`None`)
+        max_retries (int, optional): Maximum number of retries for API calls.
+            (default: :obj:`3`)
+        **kwargs (Any): Additional arguments to pass to the client
+            initialization.
     """
 
     @api_keys_required(
@@ -62,6 +66,8 @@ class NetmindModel(OpenAICompatibleModel):
         url: Optional[str] = None,
         token_counter: Optional[BaseTokenCounter] = None,
         timeout: Optional[float] = None,
+        max_retries: int = 3,
+        **kwargs: Any,
     ) -> None:
         if model_config_dict is None:
             model_config_dict = NetmindConfig().as_dict()
@@ -78,19 +84,6 @@ class NetmindModel(OpenAICompatibleModel):
             url=url,
             token_counter=token_counter,
             timeout=timeout,
+            max_retries=max_retries,
+            **kwargs,
         )
-
-    def check_model_config(self):
-        r"""Check whether the model configuration contains any
-        unexpected arguments to NETMIND API.
-
-        Raises:
-            ValueError: If the model configuration dictionary contains any
-                unexpected arguments to NETMIND API.
-        """
-        for param in self.model_config_dict:
-            if param not in NETMIND_API_PARAMS:
-                raise ValueError(
-                    f"Unexpected argument `{param}` is "
-                    "input into NETMIND model backend."
-                )

camel/models/novita_model.py
CHANGED
@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,12 +10,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 
 import os
 from typing import Any, Dict, Optional, Union
 
-from camel.configs import NOVITA_API_PARAMS, NovitaConfig
+from camel.configs import NovitaConfig
 from camel.models.openai_compatible_model import OpenAICompatibleModel
 from camel.types import ModelType
 from camel.utils import (
@@ -47,6 +47,10 @@ class NovitaModel(OpenAICompatibleModel):
             API calls. If not provided, will fall back to the MODEL_TIMEOUT
             environment variable or default to 180 seconds.
             (default: :obj:`None`)
+        max_retries (int, optional): Maximum number of retries for API calls.
+            (default: :obj:`3`)
+        **kwargs (Any): Additional arguments to pass to the client
+            initialization.
     """
 
     @api_keys_required(
@@ -62,6 +66,8 @@ class NovitaModel(OpenAICompatibleModel):
         url: Optional[str] = None,
         token_counter: Optional[BaseTokenCounter] = None,
         timeout: Optional[float] = None,
+        max_retries: int = 3,
+        **kwargs: Any,
     ) -> None:
         if model_config_dict is None:
             model_config_dict = NovitaConfig().as_dict()
@@ -77,19 +83,6 @@ class NovitaModel(OpenAICompatibleModel):
             url=url,
             token_counter=token_counter,
             timeout=timeout,
+            max_retries=max_retries,
+            **kwargs,
         )
-
-    def check_model_config(self):
-        r"""Check whether the model configuration contains any
-        unexpected arguments to Novita API.
-
-        Raises:
-            ValueError: If the model configuration dictionary contains any
-                unexpected arguments to Novita API.
-        """
-        for param in self.model_config_dict:
-            if param not in NOVITA_API_PARAMS:
-                raise ValueError(
-                    f"Unexpected argument `{param}` is "
-                    "input into Novita model backend."
-                )

camel/models/nvidia_model.py
CHANGED
@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,12 +10,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 
 import os
 from typing import Any, Dict, Optional, Union
 
-from camel.configs import NVIDIA_API_PARAMS, NvidiaConfig
+from camel.configs import NvidiaConfig
 from camel.models.openai_compatible_model import OpenAICompatibleModel
 from camel.types import ModelType
 from camel.utils import BaseTokenCounter, api_keys_required
@@ -43,6 +43,10 @@ class NvidiaModel(OpenAICompatibleModel):
             API calls. If not provided, will fall back to the MODEL_TIMEOUT
             environment variable or default to 180 seconds.
             (default: :obj:`None`)
+        max_retries (int, optional): Maximum number of retries for API calls.
+            (default: :obj:`3`)
+        **kwargs (Any): Additional arguments to pass to the client
+            initialization.
     """
 
     @api_keys_required(
@@ -58,6 +62,8 @@ class NvidiaModel(OpenAICompatibleModel):
         url: Optional[str] = None,
         token_counter: Optional[BaseTokenCounter] = None,
         timeout: Optional[float] = None,
+        max_retries: int = 3,
+        **kwargs: Any,
     ) -> None:
         if model_config_dict is None:
             model_config_dict = NvidiaConfig().as_dict()
@@ -73,19 +79,6 @@ class NvidiaModel(OpenAICompatibleModel):
             url=url,
             token_counter=token_counter,
             timeout=timeout,
+            max_retries=max_retries,
+            **kwargs,
         )
-
-    def check_model_config(self):
-        r"""Check whether the model configuration contains any
-        unexpected arguments to NVIDIA API.
-
-        Raises:
-            ValueError: If the model configuration dictionary contains any
-                unexpected arguments to NVIDIA API.
-        """
-        for param in self.model_config_dict:
-            if param not in NVIDIA_API_PARAMS:
-                raise ValueError(
-                    f"Unexpected argument `{param}` is "
-                    "input into NVIDIA model backend."
-                )

camel/models/ollama_model.py
CHANGED
@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,12 +10,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 import os
 import subprocess
 from typing import Any, Dict, Optional, Union
 
-from camel.configs import OLLAMA_API_PARAMS, OllamaConfig
+from camel.configs import OllamaConfig
 from camel.logger import get_logger
 from camel.models.openai_compatible_model import OpenAICompatibleModel
 from camel.types import ModelType
@@ -35,8 +35,8 @@ class OllamaModel(OpenAICompatibleModel):
             If :obj:`None`, :obj:`OllamaConfig().as_dict()` will be used.
             (default: :obj:`None`)
         api_key (Optional[str], optional): The API key for authenticating with
-            the model service.
-
+            the model service. Required for Ollama cloud services. If not
+            provided, defaults to "Not_Provided". (default: :obj:`None`)
         url (Optional[str], optional): The url to the model service.
             (default: :obj:`None`)
         token_counter (Optional[BaseTokenCounter], optional): Token counter to
@@ -47,6 +47,10 @@ class OllamaModel(OpenAICompatibleModel):
             API calls. If not provided, will fall back to the MODEL_TIMEOUT
             environment variable or default to 180 seconds.
             (default: :obj:`None`)
+        max_retries (int, optional): Maximum number of retries for API calls.
+            (default: :obj:`3`)
+        **kwargs (Any): Additional arguments to pass to the client
+            initialization.
 
     References:
         https://github.com/ollama/ollama/blob/main/docs/openai.md
@@ -60,6 +64,8 @@ class OllamaModel(OpenAICompatibleModel):
         url: Optional[str] = None,
         token_counter: Optional[BaseTokenCounter] = None,
         timeout: Optional[float] = None,
+        max_retries: int = 3,
+        **kwargs: Any,
     ) -> None:
         if model_config_dict is None:
             model_config_dict = OllamaConfig().as_dict()
@@ -73,10 +79,12 @@ class OllamaModel(OpenAICompatibleModel):
         super().__init__(
             model_type=self._model_type,
             model_config_dict=model_config_dict,
-            api_key="
+            api_key=api_key or "Not_Provided",
             url=self._url,
             token_counter=token_counter,
             timeout=timeout,
+            max_retries=max_retries,
+            **kwargs,
         )
 
     def _start_server(self) -> None:
@@ -94,18 +102,3 @@ class OllamaModel(OpenAICompatibleModel):
             )
         except Exception as e:
             logger.error(f"Failed to start Ollama server: {e}.")
-
-    def check_model_config(self):
-        r"""Check whether the model configuration contains any
-        unexpected arguments to Ollama API.
-
-        Raises:
-            ValueError: If the model configuration dictionary contains any
-                unexpected arguments to OpenAI API.
-        """
-        for param in self.model_config_dict:
-            if param not in OLLAMA_API_PARAMS:
-                raise ValueError(
-                    f"Unexpected argument `{param}` is "
-                    "input into Ollama model backend."
-                )

@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,7 +10,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 import base64
 import os
 from typing import Any, List, Optional, Union

@@ -1,4 +1,4 @@
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -10,13 +10,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# ========= Copyright 2023-2025 @ CAMEL-AI.org. All Rights Reserved. =========
 
 import os
 from json import JSONDecodeError
 from typing import Any, Dict, List, Optional, Type, Union
 
 from openai import AsyncOpenAI, AsyncStream, BadRequestError, OpenAI, Stream
+from openai.lib.streaming.chat import (
+    AsyncChatCompletionStreamManager,
+    ChatCompletionStreamManager,
+)
 from pydantic import BaseModel, ValidationError
 
 from camel.logger import get_logger
@@ -41,6 +45,11 @@ if os.environ.get("LANGFUSE_ENABLED", "False").lower() == "true":
         from langfuse.decorators import observe
     except ImportError:
         from camel.utils import observe
+elif os.environ.get("TRACEROOT_ENABLED", "False").lower() == "true":
+    try:
+        from traceroot import trace as observe  # type: ignore[import]
+    except ImportError:
+        from camel.utils import observe
 else:
     from camel.utils import observe
 
@@ -67,6 +76,23 @@ class OpenAICompatibleModel(BaseModelBackend):
             API calls. If not provided, will fall back to the MODEL_TIMEOUT
             environment variable or default to 180 seconds.
             (default: :obj:`None`)
+        max_retries (int, optional): Maximum number of retries for API calls.
+            (default: :obj:`3`)
+        client (Optional[Any], optional): A custom synchronous
+            OpenAI-compatible client instance. If provided, this client will
+            be used instead of creating a new one. Useful for RL frameworks
+            like AReaL or rLLM that provide OpenAI-compatible clients (e.g.,
+            ArealOpenAI). The client should implement the OpenAI client
+            interface with `.chat.completions.create()` and `.beta.chat.
+            completions.parse()` methods. (default: :obj:`None`)
+        async_client (Optional[Any], optional): A custom asynchronous
+            OpenAI-compatible client instance. If provided, this client will
+            be used instead of creating a new one. The client should implement
+            the AsyncOpenAI client interface. (default: :obj:`None`)
+        **kwargs (Any): Additional arguments to pass to the
+            OpenAI client initialization. These can include parameters like
+            'organization', 'default_headers', 'http_client', etc.
+            Ignored if custom clients are provided.
     """
 
     def __init__(
@@ -77,42 +103,73 @@ class OpenAICompatibleModel(BaseModelBackend):
         url: Optional[str] = None,
         token_counter: Optional[BaseTokenCounter] = None,
         timeout: Optional[float] = None,
+        max_retries: int = 3,
+        client: Optional[Any] = None,
+        async_client: Optional[Any] = None,
+        **kwargs: Any,
     ) -> None:
         api_key = api_key or os.environ.get("OPENAI_COMPATIBILITY_API_KEY")
         url = url or os.environ.get("OPENAI_COMPATIBILITY_API_BASE_URL")
         timeout = timeout or float(os.environ.get("MODEL_TIMEOUT", 180))
+
         super().__init__(
-            model_type, model_config_dict, api_key, url, token_counter, timeout
+            model_type,
+            model_config_dict,
+            api_key,
+            url,
+            token_counter,
+            timeout,
+            max_retries,
         )
-
-        if is_langfuse_available():
-            from langfuse.openai import AsyncOpenAI as LangfuseAsyncOpenAI
-            from langfuse.openai import OpenAI as LangfuseOpenAI
-            self._client = LangfuseOpenAI(
-                timeout=self._timeout,
-                max_retries=3,
-                base_url=self._url,
-                api_key=self._api_key,
-            )
-            self._async_client = LangfuseAsyncOpenAI(
-                timeout=self._timeout,
-                max_retries=3,
-                base_url=self._url,
-                api_key=self._api_key,
-            )
+
+        # Use custom clients if provided, otherwise create new ones
+        if client is not None:
+            # Use the provided custom sync client
+            self._client = client
         else:
-            self._client = OpenAI(
-                timeout=self._timeout,
-                max_retries=3,
-                base_url=self._url,
-                api_key=self._api_key,
-            )
-            self._async_client = AsyncOpenAI(
-                timeout=self._timeout,
-                max_retries=3,
-                base_url=self._url,
-                api_key=self._api_key,
-            )
+            # Create default sync client
+            if is_langfuse_available():
+                from langfuse.openai import OpenAI as LangfuseOpenAI
+
+                self._client = LangfuseOpenAI(
+                    timeout=self._timeout,
+                    max_retries=max_retries,
+                    base_url=self._url,
+                    api_key=self._api_key,
+                    **kwargs,
+                )
+            else:
+                self._client = OpenAI(
+                    timeout=self._timeout,
+                    max_retries=max_retries,
+                    base_url=self._url,
+                    api_key=self._api_key,
+                    **kwargs,
+                )
+
+        if async_client is not None:
+            # Use the provided custom async client
+            self._async_client = async_client
+        else:
+            # Create default async client
+            if is_langfuse_available():
+                from langfuse.openai import AsyncOpenAI as LangfuseAsyncOpenAI
+
+                self._async_client = LangfuseAsyncOpenAI(
+                    timeout=self._timeout,
+                    max_retries=max_retries,
+                    base_url=self._url,
+                    api_key=self._api_key,
+                    **kwargs,
+                )
+            else:
+                self._async_client = AsyncOpenAI(
+                    timeout=self._timeout,
+                    max_retries=max_retries,
+                    base_url=self._url,
+                    api_key=self._api_key,
+                    **kwargs,
+                )
 
     @observe()
     def _run(
@@ -120,7 +177,11 @@ class OpenAICompatibleModel(BaseModelBackend):
         messages: List[OpenAIMessage],
         response_format: Optional[Type[BaseModel]] = None,
         tools: Optional[List[Dict[str, Any]]] = None,
-    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
+    ) -> Union[
+        ChatCompletion,
+        Stream[ChatCompletionChunk],
+        ChatCompletionStreamManager[BaseModel],
+    ]:
         r"""Runs inference of OpenAI chat completion.
 
         Args:
@@ -135,6 +196,8 @@ class OpenAICompatibleModel(BaseModelBackend):
             Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                 `ChatCompletion` in the non-stream mode, or
                 `Stream[ChatCompletionChunk]` in the stream mode.
+                `ChatCompletionStreamManager[BaseModel]` for
+                structured output streaming.
         """
 
         # Update Langfuse trace with current agent session and metadata
@@ -152,10 +215,19 @@ class OpenAICompatibleModel(BaseModelBackend):
         response_format = response_format or self.model_config_dict.get(
            "response_format", None
         )
+
+        # Check if streaming is enabled
+        is_streaming = self.model_config_dict.get("stream", False)
+
         if response_format:
-
-
-
+            if is_streaming:
+                # Use streaming parse for structured output
+                return self._request_stream_parse(
+                    messages, response_format, tools
+                )
+            else:
+                # Use non-streaming parse for structured output
+                return self._request_parse(messages, response_format, tools)
         else:
             result = self._request_chat_completion(messages, tools)
 
@@ -167,7 +239,11 @@ class OpenAICompatibleModel(BaseModelBackend):
         messages: List[OpenAIMessage],
         response_format: Optional[Type[BaseModel]] = None,
         tools: Optional[List[Dict[str, Any]]] = None,
-    ) -> Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
+    ) -> Union[
+        ChatCompletion,
+        AsyncStream[ChatCompletionChunk],
+        AsyncChatCompletionStreamManager[BaseModel],
+    ]:
         r"""Runs inference of OpenAI chat completion in async mode.
 
         Args:
@@ -179,9 +255,12 @@ class OpenAICompatibleModel(BaseModelBackend):
                 use for the request.
 
         Returns:
-            Union[ChatCompletion, AsyncStream[ChatCompletionChunk]]:
-                `ChatCompletion` in the non-stream mode, or
-                `AsyncStream[ChatCompletionChunk]` in the stream mode.
+            Union[ChatCompletion, AsyncStream[ChatCompletionChunk],
+                AsyncChatCompletionStreamManager[BaseModel]]:
+                `ChatCompletion` in the non-stream mode,
+                `AsyncStream[ChatCompletionChunk]` in the stream mode,
+                or `AsyncChatCompletionStreamManager[BaseModel]` for
+                structured output streaming.
         """
 
         # Update Langfuse trace with current agent session and metadata
@@ -199,10 +278,21 @@ class OpenAICompatibleModel(BaseModelBackend):
         response_format = response_format or self.model_config_dict.get(
            "response_format", None
         )
+
+        # Check if streaming is enabled
+        is_streaming = self.model_config_dict.get("stream", False)
+
         if response_format:
-
-
-
+            if is_streaming:
+                # Use streaming parse for structured output
+                return await self._arequest_stream_parse(
+                    messages, response_format, tools
+                )
+            else:
+                # Use non-streaming parse for structured output
+                return await self._arequest_parse(
+                    messages, response_format, tools
+                )
         else:
             result = await self._arequest_chat_completion(messages, tools)
 
@@ -318,6 +408,62 @@ class OpenAICompatibleModel(BaseModelBackend):
                 logger.error(f"Fallback attempt also failed: {e}")
                 raise
 
+    def _request_stream_parse(
+        self,
+        messages: List[OpenAIMessage],
+        response_format: Type[BaseModel],
+        tools: Optional[List[Dict[str, Any]]] = None,
+    ) -> ChatCompletionStreamManager[BaseModel]:
+        r"""Request streaming structured output parsing.
+
+        Note: This uses OpenAI's beta streaming API for structured outputs.
+        """
+        import copy
+
+        request_config = copy.deepcopy(self.model_config_dict)
+
+        # Remove stream from config as it's handled by the stream method
+        request_config.pop("stream", None)
+
+        if tools is not None:
+            request_config["tools"] = tools
+
+        # Use the beta streaming API for structured outputs
+        return self._client.beta.chat.completions.stream(
+            messages=messages,
+            model=self.model_type,
+            response_format=response_format,
+            **request_config,
+        )
+
+    async def _arequest_stream_parse(
+        self,
+        messages: List[OpenAIMessage],
+        response_format: Type[BaseModel],
+        tools: Optional[List[Dict[str, Any]]] = None,
+    ) -> AsyncChatCompletionStreamManager[BaseModel]:
+        r"""Request async streaming structured output parsing.
+
+        Note: This uses OpenAI's beta streaming API for structured outputs.
+        """
+        import copy
+
+        request_config = copy.deepcopy(self.model_config_dict)
+
+        # Remove stream from config as it's handled by the stream method
+        request_config.pop("stream", None)
+
+        if tools is not None:
+            request_config["tools"] = tools
+
+        # Use the beta streaming API for structured outputs
+        return self._async_client.beta.chat.completions.stream(
+            messages=messages,
+            model=self.model_type,
+            response_format=response_format,
+            **request_config,
+        )
+
     @property
     def token_counter(self) -> BaseTokenCounter:
         r"""Initialize the token counter for the model backend.
@@ -340,6 +486,3 @@ class OpenAICompatibleModel(BaseModelBackend):
             bool: Whether the model is in stream mode.
         """
         return self.model_config_dict.get('stream', False)
-
-    def check_model_config(self):
-        pass

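The `OpenAICompatibleModel` changes above do three things: `max_retries` and client `**kwargs` are forwarded when the default OpenAI clients are created, pre-built `client`/`async_client` instances can be injected instead of having the backend construct its own (e.g. clients supplied by RL frameworks such as AReaL or rLLM), and when `stream=True` is combined with a `response_format` the `_run`/`_arun` paths now return stream managers from the beta structured-output API rather than plain chunk streams. A minimal sketch of the client-injection path, assuming a locally served OpenAI-compatible endpoint; the URL, model id, and API key are placeholders, not values from this diff:

```python
from openai import AsyncOpenAI, OpenAI

from camel.models.openai_compatible_model import OpenAICompatibleModel

# Stand-ins for clients that an external framework might hand to CAMEL.
sync_client = OpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")
async_client = AsyncOpenAI(base_url="http://localhost:8000/v1", api_key="EMPTY")

model = OpenAICompatibleModel(
    model_type="my-served-model",   # placeholder model id
    model_config_dict={},           # no extra sampling parameters
    api_key="EMPTY",                # stored, but unused once clients are injected
    url="http://localhost:8000/v1",
    client=sync_client,             # new in 0.2.82: reuse an existing sync client
    async_client=async_client,      # new in 0.2.82: same for the async path
    max_retries=5,                  # applied only when clients are *not* injected
)
```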