camel-ai 0.1.5.1__tar.gz → 0.1.5.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic. Click here for more details.

Files changed (152) hide show
  1. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/PKG-INFO +82 -53
  2. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/README.md +70 -51
  3. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/__init__.py +2 -0
  4. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/chat_agent.py +237 -52
  5. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/critic_agent.py +6 -9
  6. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/deductive_reasoner_agent.py +93 -40
  7. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/embodied_agent.py +6 -9
  8. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/knowledge_graph_agent.py +49 -27
  9. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/role_assignment_agent.py +14 -12
  10. camel_ai-0.1.5.3/camel/agents/search_agent.py +122 -0
  11. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/task_agent.py +26 -38
  12. camel_ai-0.1.5.3/camel/bots/__init__.py +20 -0
  13. camel_ai-0.1.5.3/camel/bots/discord_bot.py +103 -0
  14. camel_ai-0.1.5.3/camel/bots/telegram_bot.py +84 -0
  15. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/configs/__init__.py +3 -0
  16. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/configs/anthropic_config.py +1 -1
  17. camel_ai-0.1.5.3/camel/configs/litellm_config.py +113 -0
  18. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/configs/openai_config.py +14 -0
  19. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/embeddings/__init__.py +2 -0
  20. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/embeddings/openai_embedding.py +2 -2
  21. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/embeddings/sentence_transformers_embeddings.py +6 -5
  22. camel_ai-0.1.5.3/camel/embeddings/vlm_embedding.py +146 -0
  23. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/__init__.py +9 -0
  24. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/open_api_function.py +161 -33
  25. camel_ai-0.1.5.3/camel/functions/open_api_specs/biztoc/ai-plugin.json +34 -0
  26. camel_ai-0.1.5.3/camel/functions/open_api_specs/biztoc/openapi.yaml +21 -0
  27. camel_ai-0.1.5.3/camel/functions/open_api_specs/create_qr_code/openapi.yaml +44 -0
  28. camel_ai-0.1.5.3/camel/functions/open_api_specs/klarna/__init__.py +13 -0
  29. camel_ai-0.1.5.3/camel/functions/open_api_specs/nasa_apod/__init__.py +13 -0
  30. camel_ai-0.1.5.3/camel/functions/open_api_specs/nasa_apod/openapi.yaml +72 -0
  31. camel_ai-0.1.5.3/camel/functions/open_api_specs/outschool/__init__.py +13 -0
  32. camel_ai-0.1.5.3/camel/functions/open_api_specs/outschool/ai-plugin.json +34 -0
  33. camel_ai-0.1.5.3/camel/functions/open_api_specs/outschool/openapi.yaml +1 -0
  34. camel_ai-0.1.5.3/camel/functions/open_api_specs/outschool/paths/__init__.py +14 -0
  35. camel_ai-0.1.5.3/camel/functions/open_api_specs/outschool/paths/get_classes.py +29 -0
  36. camel_ai-0.1.5.3/camel/functions/open_api_specs/outschool/paths/search_teachers.py +29 -0
  37. camel_ai-0.1.5.3/camel/functions/open_api_specs/security_config.py +21 -0
  38. camel_ai-0.1.5.3/camel/functions/open_api_specs/speak/__init__.py +13 -0
  39. camel_ai-0.1.5.3/camel/functions/open_api_specs/web_scraper/__init__.py +13 -0
  40. camel_ai-0.1.5.3/camel/functions/open_api_specs/web_scraper/ai-plugin.json +34 -0
  41. camel_ai-0.1.5.3/camel/functions/open_api_specs/web_scraper/openapi.yaml +71 -0
  42. camel_ai-0.1.5.3/camel/functions/open_api_specs/web_scraper/paths/__init__.py +13 -0
  43. camel_ai-0.1.5.3/camel/functions/open_api_specs/web_scraper/paths/scraper.py +29 -0
  44. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/openai_function.py +3 -1
  45. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/search_functions.py +104 -171
  46. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/slack_functions.py +16 -3
  47. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/human.py +3 -1
  48. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/loaders/base_io.py +3 -1
  49. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/loaders/unstructured_io.py +16 -22
  50. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/messages/base.py +135 -46
  51. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/__init__.py +8 -0
  52. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/anthropic_model.py +24 -16
  53. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/base_model.py +6 -1
  54. camel_ai-0.1.5.3/camel/models/litellm_model.py +112 -0
  55. camel_ai-0.1.5.3/camel/models/model_factory.py +93 -0
  56. camel_ai-0.1.5.3/camel/models/nemotron_model.py +71 -0
  57. camel_ai-0.1.5.3/camel/models/ollama_model.py +121 -0
  58. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/open_source_model.py +8 -2
  59. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/openai_model.py +14 -5
  60. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/stub_model.py +3 -1
  61. camel_ai-0.1.5.3/camel/models/zhipuai_model.py +125 -0
  62. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/__init__.py +6 -0
  63. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/base.py +2 -1
  64. camel_ai-0.1.5.3/camel/prompts/descripte_video_prompt.py +33 -0
  65. camel_ai-0.1.5.3/camel/prompts/generate_text_embedding_data.py +79 -0
  66. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/task_prompt_template.py +13 -3
  67. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/retrievers/auto_retriever.py +20 -11
  68. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/retrievers/base.py +4 -2
  69. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/retrievers/bm25_retriever.py +2 -1
  70. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/retrievers/cohere_rerank_retriever.py +2 -1
  71. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/retrievers/vector_retriever.py +10 -4
  72. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/societies/babyagi_playing.py +2 -1
  73. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/societies/role_playing.py +18 -20
  74. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/graph_storages/base.py +1 -0
  75. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/graph_storages/neo4j_graph.py +5 -3
  76. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/vectordb_storages/base.py +2 -1
  77. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/vectordb_storages/milvus.py +5 -2
  78. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/toolkits/github_toolkit.py +120 -26
  79. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/types/__init__.py +5 -2
  80. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/types/enums.py +95 -4
  81. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/utils/__init__.py +11 -2
  82. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/utils/commons.py +78 -4
  83. camel_ai-0.1.5.3/camel/utils/constants.py +26 -0
  84. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/utils/token_counting.py +62 -7
  85. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/pyproject.toml +49 -7
  86. camel_ai-0.1.5.1/camel/models/model_factory.py +0 -65
  87. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/__init__.py +0 -0
  88. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/base.py +0 -0
  89. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/tool_agents/__init__.py +0 -0
  90. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/tool_agents/base.py +0 -0
  91. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/agents/tool_agents/hugging_face_tool_agent.py +0 -0
  92. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/configs/base_config.py +0 -0
  93. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/embeddings/base.py +0 -0
  94. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/google_maps_function.py +0 -0
  95. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/math_functions.py +0 -0
  96. {camel_ai-0.1.5.1/camel/functions/open_api_specs/coursera → camel_ai-0.1.5.3/camel/functions/open_api_specs/biztoc}/__init__.py +0 -0
  97. {camel_ai-0.1.5.1/camel/functions/open_api_specs/klarna → camel_ai-0.1.5.3/camel/functions/open_api_specs/coursera}/__init__.py +0 -0
  98. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/open_api_specs/coursera/openapi.yaml +0 -0
  99. {camel_ai-0.1.5.1/camel/functions/open_api_specs/speak → camel_ai-0.1.5.3/camel/functions/open_api_specs/create_qr_code}/__init__.py +0 -0
  100. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/open_api_specs/klarna/openapi.yaml +0 -0
  101. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/open_api_specs/speak/openapi.yaml +0 -0
  102. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/retrieval_functions.py +0 -0
  103. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/twitter_function.py +0 -0
  104. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/functions/weather_functions.py +0 -0
  105. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/generators.py +0 -0
  106. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/interpreters/__init__.py +0 -0
  107. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/interpreters/base.py +0 -0
  108. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/interpreters/internal_python_interpreter.py +0 -0
  109. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/interpreters/interpreter_error.py +0 -0
  110. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/interpreters/subprocess_interpreter.py +0 -0
  111. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/loaders/__init__.py +0 -0
  112. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/__init__.py +0 -0
  113. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/agent_memories.py +0 -0
  114. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/base.py +0 -0
  115. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/blocks/__init__.py +0 -0
  116. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/blocks/chat_history_block.py +0 -0
  117. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/blocks/vectordb_block.py +0 -0
  118. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/context_creators/__init__.py +0 -0
  119. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/context_creators/score_based.py +0 -0
  120. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/memories/records.py +0 -0
  121. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/messages/__init__.py +0 -0
  122. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/messages/func_message.py +0 -0
  123. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/models/openai_audio_models.py +0 -0
  124. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/ai_society.py +0 -0
  125. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/code.py +0 -0
  126. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/evaluation.py +0 -0
  127. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/misalignment.py +0 -0
  128. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/object_recognition.py +0 -0
  129. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/prompt_templates.py +0 -0
  130. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/role_description_prompt_template.py +0 -0
  131. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/solution_extraction.py +0 -0
  132. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/prompts/translation.py +0 -0
  133. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/responses/__init__.py +0 -0
  134. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/responses/agent_responses.py +0 -0
  135. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/retrievers/__init__.py +0 -0
  136. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/societies/__init__.py +0 -0
  137. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/__init__.py +0 -0
  138. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/graph_storages/__init__.py +0 -0
  139. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/graph_storages/graph_element.py +0 -0
  140. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/key_value_storages/__init__.py +0 -0
  141. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/key_value_storages/base.py +0 -0
  142. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/key_value_storages/in_memory.py +0 -0
  143. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/key_value_storages/json.py +0 -0
  144. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/vectordb_storages/__init__.py +0 -0
  145. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/storages/vectordb_storages/qdrant.py +0 -0
  146. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/terminators/__init__.py +0 -0
  147. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/terminators/base.py +0 -0
  148. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/terminators/response_terminator.py +0 -0
  149. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/terminators/token_limit_terminator.py +0 -0
  150. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/toolkits/__init__.py +0 -0
  151. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/toolkits/base.py +0 -0
  152. {camel_ai-0.1.5.1 → camel_ai-0.1.5.3}/camel/types/openai_types.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: camel-ai
3
- Version: 0.1.5.1
3
+ Version: 0.1.5.3
4
4
  Summary: Communicative Agents for AI Society Study
5
5
  Home-page: https://www.camel-ai.org/
6
6
  License: Apache-2.0
@@ -16,37 +16,47 @@ Provides-Extra: all
16
16
  Provides-Extra: encoders
17
17
  Provides-Extra: graph-storages
18
18
  Provides-Extra: huggingface-agent
19
+ Provides-Extra: model-platforms
19
20
  Provides-Extra: retrievers
20
21
  Provides-Extra: test
21
22
  Provides-Extra: tools
22
23
  Provides-Extra: vector-databases
23
24
  Requires-Dist: PyMuPDF (>=1.22.5,<2.0.0) ; extra == "tools" or extra == "all"
24
25
  Requires-Dist: accelerate (>=0,<1) ; extra == "huggingface-agent" or extra == "all"
25
- Requires-Dist: anthropic (>=0.21.3,<0.22.0)
26
+ Requires-Dist: anthropic (>=0.28.0,<0.29.0)
26
27
  Requires-Dist: beautifulsoup4 (>=4,<5) ; extra == "tools" or extra == "all"
27
28
  Requires-Dist: cohere (>=4.56,<5.0) ; extra == "retrievers" or extra == "all"
28
29
  Requires-Dist: colorama (>=0,<1)
30
+ Requires-Dist: curl_cffi (==0.6.2)
29
31
  Requires-Dist: datasets (>=2,<3) ; extra == "huggingface-agent" or extra == "all"
30
32
  Requires-Dist: diffusers (>=0,<1) ; extra == "huggingface-agent" or extra == "all"
33
+ Requires-Dist: discord.py (>=2.3.2,<3.0.0) ; extra == "tools" or extra == "all"
31
34
  Requires-Dist: docstring-parser (>=0.15,<0.16)
32
35
  Requires-Dist: docx2txt (>=0.8,<0.9) ; extra == "tools" or extra == "all"
36
+ Requires-Dist: duckduckgo-search (>=6.1.0,<7.0.0) ; extra == "tools" or extra == "all"
33
37
  Requires-Dist: googlemaps (>=4.10.0,<5.0.0) ; extra == "tools" or extra == "all"
38
+ Requires-Dist: imageio (>=2.34.1,<3.0.0) ; extra == "tools" or extra == "all"
34
39
  Requires-Dist: jsonschema (>=4,<5)
40
+ Requires-Dist: litellm (>=1.38.1,<2.0.0) ; extra == "model-platforms" or extra == "all"
35
41
  Requires-Dist: mock (>=5,<6) ; extra == "test"
36
42
  Requires-Dist: neo4j (>=5.18.0,<6.0.0) ; extra == "graph-storages" or extra == "all"
43
+ Requires-Dist: newspaper3k (>=0.2.8,<0.3.0) ; extra == "tools" or extra == "all"
37
44
  Requires-Dist: numpy (>=1,<2)
38
45
  Requires-Dist: openai (>=1.2.3,<2.0.0)
39
46
  Requires-Dist: openapi-spec-validator (>=0.7.1,<0.8.0) ; extra == "tools" or extra == "all"
40
47
  Requires-Dist: opencv-python (>=4,<5) ; extra == "huggingface-agent" or extra == "all"
41
48
  Requires-Dist: pathlib (>=1.0.1,<2.0.0)
49
+ Requires-Dist: pillow (>=10.2.0,<11.0.0) ; extra == "tools" or extra == "all"
42
50
  Requires-Dist: prance (>=23.6.21.0,<24.0.0.0) ; extra == "tools" or extra == "all"
43
51
  Requires-Dist: protobuf (>=4,<5)
52
+ Requires-Dist: pyTelegramBotAPI (>=4.18.0,<5.0.0) ; extra == "tools" or extra == "all"
44
53
  Requires-Dist: pydantic (>=1.9,<3)
45
54
  Requires-Dist: pydub (>=0.25.1,<0.26.0) ; extra == "tools" or extra == "all"
46
55
  Requires-Dist: pygithub (>=2.3.0,<3.0.0) ; extra == "tools" or extra == "all"
47
56
  Requires-Dist: pymilvus (>=2.4.0,<3.0.0) ; extra == "vector-databases" or extra == "all"
48
57
  Requires-Dist: pyowm (>=3.3.0,<4.0.0) ; extra == "tools" or extra == "all"
49
58
  Requires-Dist: pytest (>=7,<8) ; extra == "test"
59
+ Requires-Dist: pytest-asyncio (>=0.23.0,<0.24.0) ; extra == "test"
50
60
  Requires-Dist: qdrant-client (>=1.9.0,<2.0.0) ; extra == "vector-databases" or extra == "all"
51
61
  Requires-Dist: rank-bm25 (>=0.2.2,<0.3.0) ; extra == "retrievers" or extra == "all"
52
62
  Requires-Dist: requests_oauthlib (>=1.3.1,<2.0.0) ; extra == "tools" or extra == "all"
@@ -121,6 +131,10 @@ To install the base CAMEL library:
121
131
  pip install camel-ai
122
132
  ```
123
133
  Some features require extra dependencies:
134
+ - To install with all dependencies:
135
+ ```bash
136
+ pip install 'camel-ai[all]'
137
+ ```
124
138
  - To use the HuggingFace agents:
125
139
  ```bash
126
140
  pip install 'camel-ai[huggingface-agent]'
@@ -129,10 +143,6 @@ Some features require extra dependencies:
129
143
  ```bash
130
144
  pip install 'camel-ai[tools]'
131
145
  ```
132
- - To install with all dependencies:
133
- ```bash
134
- pip install 'camel-ai[all]'
135
- ```
136
146
 
137
147
  ### From Source
138
148
 
@@ -147,14 +157,20 @@ git clone https://github.com/camel-ai/camel.git
147
157
  # Change directory into project directory
148
158
  cd camel
149
159
 
150
- # Activate camel virtual environment
160
+ # If you didn't install poetry before
161
+ pip install poetry # (Optional)
162
+
163
+ # We suggest using python 3.10
164
+ poetry env use python3.10 # (Optional)
165
+
166
+ # Activate CAMEL virtual environment
151
167
  poetry shell
152
168
 
153
- # Install camel from source
154
- # It takes about 90 seconds to resolve dependencies
169
+ # Install the base CAMEL library
170
+ # It takes about 90 seconds
155
171
  poetry install
156
172
 
157
- # Or if you want to use all other extra packages
173
+ # Install CAMEL with all dependencies
158
174
  poetry install -E all # (Optional)
159
175
 
160
176
  # Exit the virtual environment
@@ -166,16 +182,16 @@ Install `CAMEL` from source with conda and pip:
166
182
  # Create a conda virtual environment
167
183
  conda create --name camel python=3.10
168
184
 
169
- # Activate camel conda environment
185
+ # Activate CAMEL conda environment
170
186
  conda activate camel
171
187
 
172
188
  # Clone github repo
173
- git clone -b v0.1.5.1 https://github.com/camel-ai/camel.git
189
+ git clone -b v0.1.5.3 https://github.com/camel-ai/camel.git
174
190
 
175
191
  # Change directory into project directory
176
192
  cd camel
177
193
 
178
- # Install camel from source
194
+ # Install CAMEL from source
179
195
  pip install -e .
180
196
 
181
197
  # Or if you want to use all other extra packages
@@ -230,54 +246,67 @@ python examples/ai_society/role_playing.py
230
246
  Please note that the environment variable is session-specific. If you open a new terminal window or tab, you will need to set the API key again in that new session.
231
247
 
232
248
 
233
- ## Use Open-Source Models as Backends
234
-
235
- The basic workflow of using an open-sourced model as the backend is based on an external server running LLM inference service, e.g. during the development we chose [FastChat](https://github.com/lm-sys/FastChat) to run the service.
236
-
237
- We do not fix the choice of server to decouple the implementation of any specific LLM inference server with CAMEL (indicating the server needs to be deployed by the user himself). But the server to be deployed must satisfy that **it supports OpenAI-compatible APIs, especially the method `openai.ChatCompletion.create`**.
238
-
239
- Here are some instructions for enabling open-source backends, where we use the [FastChat](https://github.com/lm-sys/FastChat) and a LLaMA2-based model ([`meta-llama/Llama-2-7b-chat-hf`](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)) in the example. Please install FastChat in advance following their installation guidance.
249
+ ## Use Open-Source Models as Backends (ex. using Ollama to set Llama 3 locally)
240
250
 
241
- 1. Before running CAMEL, we should firstly launch FastChat server following the guidance on https://github.com/lm-sys/FastChat/blob/main/docs/openai_api.md. The instructions summarized below should be kept running **in separate processes**:
242
-
243
- ```sh
244
- # Launch the controller
245
- python -m fastchat.serve.controller
246
-
247
- # Launch the model worker(s)
248
- python3 -m fastchat.serve.model_worker --model-path meta-llama/Llama-2-7b-chat-hf
249
-
250
- # Launch the RESTful API server
251
- python3 -m fastchat.serve.openai_api_server --host localhost --port 8000
252
- ```
251
+ - Download [Ollama](https://ollama.com/download).
252
+ - After setting up Ollama, pull the Llama3 model by typing the following command into the terminal:
253
+ ```bash
254
+ ollama pull llama3
255
+ ```
256
+ - Create a ModelFile similar to the one below in your project directory.
257
+ ```bash
258
+ FROM llama3
253
259
 
254
- 2. After observing the controller successfully receiving the heart beat signal from the worker, the server should be ready for use at http://localhost:8000/v1.
260
+ # Set parameters
261
+ PARAMETER temperature 0.8
262
+ PARAMETER stop Result
255
263
 
256
- 3. Then we can try on running `role_playing_with_open_source_model.py`, where each agent in this example is initialized with specifying the `model_path` and `server_url`, similar to the example code below:
264
+ # Sets a custom system message to specify the behavior of the chat assistant
257
265
 
258
- ```python
259
- system_message = # ...
266
+ # Leaving it blank for now.
260
267
 
261
- agent_kwargs = dict(
262
- model=model_type,
263
- model_config=OpenSourceConfig(
264
- model_path="meta-llama/Llama-2-7b-chat-hf",
265
- server_url="http://localhost:8000/v1",
266
- ),
267
- )
268
+ SYSTEM """ """
269
+ ```
270
+ - Create a script to get the base model (llama3) and create a custom model using the ModelFile above. Save this as a .sh file:
271
+ ```bash
272
+ #!/bin/zsh
268
273
 
269
- agent = ChatAgent(
270
- system_message,
271
- **agent_kwargs,
272
- )
273
- ```
274
+ # variables
275
+ model_name="llama3"
276
+ custom_model_name="camel-llama3"
274
277
 
275
- ### Supported Models
278
+ #get the base model
279
+ ollama pull $model_name
276
280
 
277
- - LLaMA2-based models
278
- - example: [meta-llama/Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)
279
- - Vicuna-based models
280
- - example: [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5)
281
+ #create the model file
282
+ ollama create $custom_model_name -f ./Llama3ModelFile
283
+ ```
284
+ - Navigate to the directory where the script and ModelFile are located and run the script. Enjoy your Llama3 model, enhanced by CAMEL's excellent agents.
285
+ ```python
286
+ from camel.agents import ChatAgent
287
+ from camel.messages import BaseMessage
288
+ from camel.models import ModelFactory
289
+ from camel.types import ModelPlatformType
290
+
291
+ ollama_model = ModelFactory.create(
292
+ model_platform=ModelPlatformType.OLLAMA,
293
+ model_type="llama3",
294
+ url="http://localhost:11434/v1",
295
+ model_config_dict={"temperature": 0.4},
296
+ )
297
+
298
+ assistant_sys_msg = BaseMessage.make_assistant_message(
299
+ role_name="Assistant",
300
+ content="You are a helpful assistant.",
301
+ )
302
+ agent = ChatAgent(assistant_sys_msg, model=ollama_model, token_limit=4096)
303
+
304
+ user_msg = BaseMessage.make_user_message(
305
+ role_name="User", content="Say hi to CAMEL"
306
+ )
307
+ assistant_response = agent.step(user_msg)
308
+ print(assistant_response.msg.content)
309
+ ```
281
310
 
282
311
  ## Data (Hosted on Hugging Face)
283
312
  | Dataset | Chat format | Instruction format | Chat format (translated) |
@@ -55,6 +55,10 @@ To install the base CAMEL library:
55
55
  pip install camel-ai
56
56
  ```
57
57
  Some features require extra dependencies:
58
+ - To install with all dependencies:
59
+ ```bash
60
+ pip install 'camel-ai[all]'
61
+ ```
58
62
  - To use the HuggingFace agents:
59
63
  ```bash
60
64
  pip install 'camel-ai[huggingface-agent]'
@@ -63,10 +67,6 @@ Some features require extra dependencies:
63
67
  ```bash
64
68
  pip install 'camel-ai[tools]'
65
69
  ```
66
- - To install with all dependencies:
67
- ```bash
68
- pip install 'camel-ai[all]'
69
- ```
70
70
 
71
71
  ### From Source
72
72
 
@@ -81,14 +81,20 @@ git clone https://github.com/camel-ai/camel.git
81
81
  # Change directory into project directory
82
82
  cd camel
83
83
 
84
- # Activate camel virtual environment
84
+ # If you didn't install poetry before
85
+ pip install poetry # (Optional)
86
+
87
+ # We suggest using python 3.10
88
+ poetry env use python3.10 # (Optional)
89
+
90
+ # Activate CAMEL virtual environment
85
91
  poetry shell
86
92
 
87
- # Install camel from source
88
- # It takes about 90 seconds to resolve dependencies
93
+ # Install the base CAMEL library
94
+ # It takes about 90 seconds
89
95
  poetry install
90
96
 
91
- # Or if you want to use all other extra packages
97
+ # Install CAMEL with all dependencies
92
98
  poetry install -E all # (Optional)
93
99
 
94
100
  # Exit the virtual environment
@@ -100,16 +106,16 @@ Install `CAMEL` from source with conda and pip:
100
106
  # Create a conda virtual environment
101
107
  conda create --name camel python=3.10
102
108
 
103
- # Activate camel conda environment
109
+ # Activate CAMEL conda environment
104
110
  conda activate camel
105
111
 
106
112
  # Clone github repo
107
- git clone -b v0.1.5.1 https://github.com/camel-ai/camel.git
113
+ git clone -b v0.1.5.3 https://github.com/camel-ai/camel.git
108
114
 
109
115
  # Change directory into project directory
110
116
  cd camel
111
117
 
112
- # Install camel from source
118
+ # Install CAMEL from source
113
119
  pip install -e .
114
120
 
115
121
  # Or if you want to use all other extra packages
@@ -164,54 +170,67 @@ python examples/ai_society/role_playing.py
164
170
  Please note that the environment variable is session-specific. If you open a new terminal window or tab, you will need to set the API key again in that new session.
165
171
 
166
172
 
167
- ## Use Open-Source Models as Backends
168
-
169
- The basic workflow of using an open-sourced model as the backend is based on an external server running LLM inference service, e.g. during the development we chose [FastChat](https://github.com/lm-sys/FastChat) to run the service.
170
-
171
- We do not fix the choice of server to decouple the implementation of any specific LLM inference server with CAMEL (indicating the server needs to be deployed by the user himself). But the server to be deployed must satisfy that **it supports OpenAI-compatible APIs, especially the method `openai.ChatCompletion.create`**.
172
-
173
- Here are some instructions for enabling open-source backends, where we use the [FastChat](https://github.com/lm-sys/FastChat) and a LLaMA2-based model ([`meta-llama/Llama-2-7b-chat-hf`](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)) in the example. Please install FastChat in advance following their installation guidance.
173
+ ## Use Open-Source Models as Backends (ex. using Ollama to set Llama 3 locally)
174
174
 
175
- 1. Before running CAMEL, we should firstly launch FastChat server following the guidance on https://github.com/lm-sys/FastChat/blob/main/docs/openai_api.md. The instructions summarized below should be kept running **in separate processes**:
176
-
177
- ```sh
178
- # Launch the controller
179
- python -m fastchat.serve.controller
180
-
181
- # Launch the model worker(s)
182
- python3 -m fastchat.serve.model_worker --model-path meta-llama/Llama-2-7b-chat-hf
183
-
184
- # Launch the RESTful API server
185
- python3 -m fastchat.serve.openai_api_server --host localhost --port 8000
186
- ```
175
+ - Download [Ollama](https://ollama.com/download).
176
+ - After setting up Ollama, pull the Llama3 model by typing the following command into the terminal:
177
+ ```bash
178
+ ollama pull llama3
179
+ ```
180
+ - Create a ModelFile similar to the one below in your project directory.
181
+ ```bash
182
+ FROM llama3
187
183
 
188
- 2. After observing the controller successfully receiving the heart beat signal from the worker, the server should be ready for use at http://localhost:8000/v1.
184
+ # Set parameters
185
+ PARAMETER temperature 0.8
186
+ PARAMETER stop Result
189
187
 
190
- 3. Then we can try on running `role_playing_with_open_source_model.py`, where each agent in this example is initialized with specifying the `model_path` and `server_url`, similar to the example code below:
188
+ # Sets a custom system message to specify the behavior of the chat assistant
191
189
 
192
- ```python
193
- system_message = # ...
190
+ # Leaving it blank for now.
194
191
 
195
- agent_kwargs = dict(
196
- model=model_type,
197
- model_config=OpenSourceConfig(
198
- model_path="meta-llama/Llama-2-7b-chat-hf",
199
- server_url="http://localhost:8000/v1",
200
- ),
201
- )
192
+ SYSTEM """ """
193
+ ```
194
+ - Create a script to get the base model (llama3) and create a custom model using the ModelFile above. Save this as a .sh file:
195
+ ```bash
196
+ #!/bin/zsh
202
197
 
203
- agent = ChatAgent(
204
- system_message,
205
- **agent_kwargs,
206
- )
207
- ```
198
+ # variables
199
+ model_name="llama3"
200
+ custom_model_name="camel-llama3"
208
201
 
209
- ### Supported Models
202
+ #get the base model
203
+ ollama pull $model_name
210
204
 
211
- - LLaMA2-based models
212
- - example: [meta-llama/Llama-2-7b-chat-hf](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf)
213
- - Vicuna-based models
214
- - example: [lmsys/vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5)
205
+ #create the model file
206
+ ollama create $custom_model_name -f ./Llama3ModelFile
207
+ ```
208
+ - Navigate to the directory where the script and ModelFile are located and run the script. Enjoy your Llama3 model, enhanced by CAMEL's excellent agents.
209
+ ```python
210
+ from camel.agents import ChatAgent
211
+ from camel.messages import BaseMessage
212
+ from camel.models import ModelFactory
213
+ from camel.types import ModelPlatformType
214
+
215
+ ollama_model = ModelFactory.create(
216
+ model_platform=ModelPlatformType.OLLAMA,
217
+ model_type="llama3",
218
+ url="http://localhost:11434/v1",
219
+ model_config_dict={"temperature": 0.4},
220
+ )
221
+
222
+ assistant_sys_msg = BaseMessage.make_assistant_message(
223
+ role_name="Assistant",
224
+ content="You are a helpful assistant.",
225
+ )
226
+ agent = ChatAgent(assistant_sys_msg, model=ollama_model, token_limit=4096)
227
+
228
+ user_msg = BaseMessage.make_user_message(
229
+ role_name="User", content="Say hi to CAMEL"
230
+ )
231
+ assistant_response = agent.step(user_msg)
232
+ print(assistant_response.msg.content)
233
+ ```
215
234
 
216
235
  ## Data (Hosted on Hugging Face)
217
236
  | Dataset | Chat format | Instruction format | Chat format (translated) |
@@ -17,6 +17,7 @@ from .critic_agent import CriticAgent
17
17
  from .embodied_agent import EmbodiedAgent
18
18
  from .knowledge_graph_agent import KnowledgeGraphAgent
19
19
  from .role_assignment_agent import RoleAssignmentAgent
20
+ from .search_agent import SearchAgent
20
21
  from .task_agent import (
21
22
  TaskCreationAgent,
22
23
  TaskPlannerAgent,
@@ -38,5 +39,6 @@ __all__ = [
38
39
  'HuggingFaceToolAgent',
39
40
  'EmbodiedAgent',
40
41
  'RoleAssignmentAgent',
42
+ 'SearchAgent',
41
43
  'KnowledgeGraphAgent',
42
44
  ]