camel-ai 0.2.3a1__py3-none-any.whl → 0.2.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.

Files changed (87)
  1. camel/__init__.py +1 -1
  2. camel/agents/chat_agent.py +93 -69
  3. camel/agents/knowledge_graph_agent.py +4 -6
  4. camel/bots/__init__.py +16 -2
  5. camel/bots/discord_app.py +138 -0
  6. camel/bots/slack/__init__.py +30 -0
  7. camel/bots/slack/models.py +158 -0
  8. camel/bots/slack/slack_app.py +255 -0
  9. camel/configs/__init__.py +1 -2
  10. camel/configs/anthropic_config.py +2 -5
  11. camel/configs/base_config.py +6 -6
  12. camel/configs/groq_config.py +2 -3
  13. camel/configs/ollama_config.py +1 -2
  14. camel/configs/openai_config.py +2 -23
  15. camel/configs/samba_config.py +2 -2
  16. camel/configs/togetherai_config.py +1 -1
  17. camel/configs/vllm_config.py +1 -1
  18. camel/configs/zhipuai_config.py +2 -3
  19. camel/embeddings/openai_embedding.py +2 -2
  20. camel/loaders/__init__.py +2 -0
  21. camel/loaders/chunkr_reader.py +163 -0
  22. camel/loaders/firecrawl_reader.py +3 -3
  23. camel/loaders/unstructured_io.py +35 -33
  24. camel/messages/__init__.py +1 -0
  25. camel/models/__init__.py +2 -4
  26. camel/models/anthropic_model.py +32 -26
  27. camel/models/azure_openai_model.py +39 -36
  28. camel/models/base_model.py +31 -20
  29. camel/models/gemini_model.py +37 -29
  30. camel/models/groq_model.py +29 -23
  31. camel/models/litellm_model.py +44 -61
  32. camel/models/mistral_model.py +32 -29
  33. camel/models/model_factory.py +66 -76
  34. camel/models/nemotron_model.py +33 -23
  35. camel/models/ollama_model.py +42 -47
  36. camel/models/{openai_compatibility_model.py → openai_compatible_model.py} +31 -49
  37. camel/models/openai_model.py +48 -29
  38. camel/models/reka_model.py +30 -28
  39. camel/models/samba_model.py +82 -177
  40. camel/models/stub_model.py +2 -2
  41. camel/models/togetherai_model.py +37 -43
  42. camel/models/vllm_model.py +43 -50
  43. camel/models/zhipuai_model.py +33 -27
  44. camel/retrievers/auto_retriever.py +29 -97
  45. camel/retrievers/vector_retriever.py +58 -47
  46. camel/societies/babyagi_playing.py +6 -3
  47. camel/societies/role_playing.py +5 -3
  48. camel/storages/graph_storages/graph_element.py +2 -2
  49. camel/storages/key_value_storages/json.py +6 -1
  50. camel/toolkits/__init__.py +20 -7
  51. camel/toolkits/arxiv_toolkit.py +155 -0
  52. camel/toolkits/ask_news_toolkit.py +653 -0
  53. camel/toolkits/base.py +2 -3
  54. camel/toolkits/code_execution.py +6 -7
  55. camel/toolkits/dalle_toolkit.py +6 -6
  56. camel/toolkits/{openai_function.py → function_tool.py} +34 -11
  57. camel/toolkits/github_toolkit.py +9 -10
  58. camel/toolkits/google_maps_toolkit.py +7 -7
  59. camel/toolkits/google_scholar_toolkit.py +146 -0
  60. camel/toolkits/linkedin_toolkit.py +7 -7
  61. camel/toolkits/math_toolkit.py +8 -8
  62. camel/toolkits/open_api_toolkit.py +5 -5
  63. camel/toolkits/reddit_toolkit.py +7 -7
  64. camel/toolkits/retrieval_toolkit.py +5 -5
  65. camel/toolkits/search_toolkit.py +9 -9
  66. camel/toolkits/slack_toolkit.py +11 -11
  67. camel/toolkits/twitter_toolkit.py +378 -452
  68. camel/toolkits/weather_toolkit.py +6 -6
  69. camel/toolkits/whatsapp_toolkit.py +177 -0
  70. camel/types/__init__.py +6 -1
  71. camel/types/enums.py +40 -85
  72. camel/types/openai_types.py +3 -0
  73. camel/types/unified_model_type.py +104 -0
  74. camel/utils/__init__.py +0 -2
  75. camel/utils/async_func.py +7 -7
  76. camel/utils/commons.py +32 -3
  77. camel/utils/token_counting.py +30 -212
  78. camel/workforce/role_playing_worker.py +1 -1
  79. camel/workforce/single_agent_worker.py +1 -1
  80. camel/workforce/task_channel.py +4 -3
  81. camel/workforce/workforce.py +4 -4
  82. camel_ai-0.2.4.dist-info/LICENSE +201 -0
  83. {camel_ai-0.2.3a1.dist-info → camel_ai-0.2.4.dist-info}/METADATA +27 -56
  84. {camel_ai-0.2.3a1.dist-info → camel_ai-0.2.4.dist-info}/RECORD +85 -76
  85. {camel_ai-0.2.3a1.dist-info → camel_ai-0.2.4.dist-info}/WHEEL +1 -1
  86. camel/bots/discord_bot.py +0 -206
  87. camel/models/open_source_model.py +0 -170
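Two modules are renamed in this release (camel/toolkits/openai_function.py → function_tool.py and camel/models/openai_compatibility_model.py → openai_compatible_model.py), and the old Discord bot and open-source model backends are deleted outright (the two removed files at the end of this diff). A minimal migration sketch follows; the class name FunctionTool and the ChatAgent tools parameter are assumptions inferred from the renamed paths and the chat_agent.py changes, so verify them against the installed 0.2.4 package.

# Hedged sketch only: `FunctionTool` (formerly exposed via openai_function.py)
# and the `tools` argument of `ChatAgent` are inferred from the renames above.
from camel.agents import ChatAgent
from camel.messages import BaseMessage
from camel.toolkits import FunctionTool


def add(a: int, b: int) -> int:
    r"""Adds two integers."""
    return a + b


sys_msg = BaseMessage.make_assistant_message(
    role_name="Assistant", content="You are a helpful assistant."
)
# Wrap a plain Python function as a tool and hand it to the agent.
agent = ChatAgent(sys_msg, tools=[FunctionTool(add)])  # `tools` is assumed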
camel/bots/discord_bot.py DELETED
@@ -1,206 +0,0 @@
- # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
- import os
- from typing import TYPE_CHECKING, List, Optional, Union
-
- from camel.agents import ChatAgent
- from camel.messages import BaseMessage
- from camel.retrievers import AutoRetriever
- from camel.utils import dependencies_required
-
- try:
-     from unstructured.documents.elements import Element
- except ImportError:
-     Element = None
-
- if TYPE_CHECKING:
-     from discord import Message
-
-
- class DiscordBot:
-     r"""Represents a Discord bot that is powered by a CAMEL `ChatAgent`.
-
-     Attributes:
-         chat_agent (ChatAgent): Chat agent that will power the bot.
-         channel_ids (List[int], optional): The channel IDs that the bot will
-             listen to.
-         discord_token (str, optional): The bot token.
-         auto_retriever (AutoRetriever): AutoRetriever instance for RAG.
-         vector_storage_local_path (Union[str, List[str]]): The paths to the
-             contents for RAG.
-         top_k (int): Top choice for the RAG response.
-         return_detailed_info (bool): Whether to show detailed info of the RAG response.
-         contents (Union[str, List[str], Element, List[Element]], optional):
-             Local file paths, remote URLs, string contents or Element objects.
-     """
-
-     @dependencies_required('discord')
-     def __init__(
-         self,
-         chat_agent: ChatAgent,
-         contents: Union[str, List[str], Element, List[Element]] = None,
-         channel_ids: Optional[List[int]] = None,
-         discord_token: Optional[str] = None,
-         auto_retriever: Optional[AutoRetriever] = None,
-         vector_storage_local_path: Union[str, List[str]] = "",
-         top_k: int = 1,
-         return_detailed_info: bool = True,
-     ) -> None:
-         self.chat_agent = chat_agent
-         self.token = discord_token or os.getenv('DISCORD_TOKEN')
-         self.channel_ids = channel_ids
-         self.auto_retriever = auto_retriever
-         self.vector_storage_local_path = vector_storage_local_path
-         self.top_k = top_k
-         self.return_detailed_info = return_detailed_info
-         self.contents = contents
-
-         if not self.token:
-             raise ValueError(
-                 "`DISCORD_TOKEN` not found in environment variables. Get it"
-                 " here: `https://discord.com/developers/applications`."
-             )
-
-         import discord
-
-         intents = discord.Intents.default()
-         intents.message_content = True
-         self.client = discord.Client(intents=intents)
-
-         # Register event handlers
-         self.client.event(self.on_ready)
-         self.client.event(self.on_message)
-
-     def run(self) -> None:
-         r"""Start the Discord bot using its token.
-
-         This method starts the Discord bot by running the client with the
-         provided token.
-         """
-         self.client.run(self.token)  # type: ignore[arg-type]
-
-     async def on_ready(self) -> None:
-         r"""This method is called when the bot has successfully connected to
-         the Discord server.
-
-         It prints a message indicating that the bot has logged in and displays
-         the username of the bot.
-         """
-         print(f'We have logged in as {self.client.user}')
-
-     async def on_message(self, message: 'Message') -> None:
-         r"""Event handler for when a message is received.
-
-         Args:
-             message (discord.Message): The message object received.
-         """
-
-         # If the message author is the bot itself,
-         # do not respond to this message
-         if message.author == self.client.user:
-             return
-
-         # If allowed channel IDs are provided,
-         # only respond to messages in those channels
-         if self.channel_ids and message.channel.id not in self.channel_ids:
-             return
-
-         # Only respond to messages that mention the bot
-         if not self.client.user or not self.client.user.mentioned_in(message):
-             return
-
-         user_raw_msg = message.content
-
-         if self.auto_retriever:
-             retrieved_content = self.auto_retriever.run_vector_retriever(
-                 query=user_raw_msg,
-                 contents=self.contents,
-                 top_k=self.top_k,
-                 return_detailed_info=self.return_detailed_info,
-             )
-             user_raw_msg = (
-                 f"Here is the query to you: {user_raw_msg}\n"
-                 f"Based on the retrieved content: {retrieved_content}, \n"
-                 f"answer the query from {message.author.name}"
-             )
-
-         user_msg = BaseMessage.make_user_message(
-             role_name="User", content=user_raw_msg
-         )
-         assistant_response = self.chat_agent.step(user_msg)
-         await message.channel.send(assistant_response.msg.content)
-
-
- if __name__ == "__main__":
-     assistant_sys_msg = BaseMessage.make_assistant_message(
-         role_name="Assistant",
-         content='''
-             Objective:
-                 You are a customer service bot designed to assist users
-                 with inquiries related to our open-source project.
-                 Your responses should be informative, concise, and helpful.
-
-             Instructions:
-                 Understand User Queries: Carefully read and understand the
-                     user's question. Focus on keywords and context to
-                     determine the user's intent.
-                 Search for Relevant Information: Use the provided dataset
-                     and refer to the RAG file to find answers that
-                     closely match the user's query. The RAG file contains
-                     detailed interactions and should be your primary
-                     resource for crafting responses.
-                 Provide Clear and Concise Responses: Your answers should
-                     be clear and to the point. Avoid overly technical
-                     language unless the user's query indicates
-                     familiarity with technical terms.
-                 Encourage Engagement: Where applicable, encourage users
-                     to contribute to the project or seek further
-                     assistance.
-
-             Response Structure:
-                 Greeting: Begin with a polite greeting or acknowledgment.
-                 Main Response: Provide the main answer to the user's query.
-                 Additional Information: Offer any extra tips or direct the
-                     user to additional resources if necessary.
-                 Closing: Close the response politely, encouraging
-                     further engagement if appropriate.
-
-             Tone:
-                 Professional: Maintain a professional tone that
-                     instills confidence in the user.
-                 Friendly: Be approachable and friendly to make users
-                     feel comfortable.
-                 Helpful: Always aim to be as helpful as possible,
-                     guiding users to solutions.
-             ''',
-     )
-
-     agent = ChatAgent(
-         assistant_sys_msg,
-         message_window_size=10,
-     )
-     # Uncomment the following code and provide storage information
-     # for RAG functionality
-
-     # auto_retriever = AutoRetriever(
-     #     vector_storage_local_path="examples/bots",
-     #     storage_type=StorageType.QDRANT,
-     # )
-
-     bot = DiscordBot(
-         agent,
-         # auto_retriever=auto_retriever,
-         # vector_storage_local_path=["local_data/"],
-     )
-     bot.run()
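The deleted example above leaves the retrieval path commented out. For reference, a minimal sketch of the removed DiscordBot with RAG enabled, assembled from its constructor and the commented-out AutoRetriever lines, follows; the camel.bots import path, storage path, sources, and channel ID are placeholders/assumptions, and the class itself no longer exists in 0.2.4 (camel/bots/discord_app.py replaces it in the file list above).

# Sketch only, valid for 0.2.3a1: reconstructs the removed DiscordBot usage
# with retrieval enabled. Paths, channel ID, and the import path are
# placeholders/assumptions; DISCORD_TOKEN is read from the environment.
from camel.agents import ChatAgent
from camel.bots import DiscordBot  # assumed 0.2.3a1 export path
from camel.messages import BaseMessage
from camel.retrievers import AutoRetriever
from camel.types import StorageType

sys_msg = BaseMessage.make_assistant_message(
    role_name="Assistant",
    content="You are a customer service bot for our open-source project.",
)
agent = ChatAgent(sys_msg, message_window_size=10)

auto_retriever = AutoRetriever(
    vector_storage_local_path="examples/bots",  # placeholder storage path
    storage_type=StorageType.QDRANT,
)

bot = DiscordBot(
    agent,
    contents=["local_data/"],          # placeholder RAG sources
    channel_ids=[123456789012345678],  # placeholder channel ID
    auto_retriever=auto_retriever,
)
bot.run()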
camel/models/open_source_model.py DELETED
@@ -1,170 +0,0 @@
- # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #     http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
- from typing import Any, Dict, List, Optional, Union
-
- from openai import OpenAI, Stream
-
- from camel.configs import OPENAI_API_PARAMS
- from camel.messages import OpenAIMessage
- from camel.models import BaseModelBackend
- from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
- from camel.utils import (
-     BaseTokenCounter,
-     OpenSourceTokenCounter,
- )
-
-
- class OpenSourceModel(BaseModelBackend):
-     r"""Class for interface with OpenAI-API-compatible servers running
-     open-source models.
-     """
-
-     def __init__(
-         self,
-         model_type: ModelType,
-         model_config_dict: Dict[str, Any],
-         api_key: Optional[str] = None,
-         url: Optional[str] = None,
-         token_counter: Optional[BaseTokenCounter] = None,
-     ) -> None:
-         r"""Constructor for model backends of open-source models.
-
-         Args:
-             model_type (ModelType): Model for which a backend is created.
-             model_config_dict (Dict[str, Any]): A dictionary that will
-                 be fed into :obj:`openai.ChatCompletion.create()`.
-             api_key (Optional[str]): The API key for authenticating with the
-                 model service. (ignored for open-source models)
-             url (Optional[str]): The url to the model service.
-             token_counter (Optional[BaseTokenCounter]): Token counter to use
-                 for the model. If not provided, `OpenSourceTokenCounter` will
-                 be used.
-         """
-         super().__init__(
-             model_type, model_config_dict, api_key, url, token_counter
-         )
-
-         # Check whether the input model type is open-source
-         if not model_type.is_open_source:
-             raise ValueError(
-                 f"Model `{model_type}` is not a supported open-source model."
-             )
-
-         # Check whether input model path is empty
-         model_path: Optional[str] = self.model_config_dict.get(
-             "model_path", None
-         )
-         if not model_path:
-             raise ValueError("Path to open-source model is not provided.")
-         self.model_path: str = model_path
-
-         # Check whether the model name matches the model type
-         self.model_name: str = self.model_path.split('/')[-1]
-         if not self.model_type.validate_model_name(self.model_name):
-             raise ValueError(
-                 f"Model name `{self.model_name}` does not match model type "
-                 f"`{self.model_type.value}`."
-             )
-
-         # Load the server URL and check whether it is None
-         server_url: Optional[str] = url or self.model_config_dict.get(
-             "server_url", None
-         )
-         if not server_url:
-             raise ValueError(
-                 "URL to server running open-source LLM is not provided."
-             )
-         self.server_url: str = server_url
-         self._client = OpenAI(
-             base_url=self.server_url,
-             timeout=60,
-             max_retries=3,
-             api_key="fake_key",
-         )
-
-         # Replace `model_config_dict` with only the params to be
-         # passed to OpenAI API
-         self.model_config_dict = self.model_config_dict["api_params"]
-
-     @property
-     def token_counter(self) -> BaseTokenCounter:
-         r"""Initialize the token counter for the model backend.
-
-         Returns:
-             BaseTokenCounter: The token counter following the model's
-                 tokenization style.
-         """
-         if not self._token_counter:
-             self._token_counter = OpenSourceTokenCounter(
-                 self.model_type, self.model_path
-             )
-         return self._token_counter
-
-     def run(
-         self,
-         messages: List[OpenAIMessage],
-     ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
-         r"""Runs inference of OpenAI-API-style chat completion.
-
-         Args:
-             messages (List[OpenAIMessage]): Message list with the chat history
-                 in OpenAI API format.
-
-         Returns:
-             Union[ChatCompletion, Stream[ChatCompletionChunk]]:
-                 `ChatCompletion` in the non-stream mode, or
-                 `Stream[ChatCompletionChunk]` in the stream mode.
-         """
-         messages_openai: List[OpenAIMessage] = messages
-         response = self._client.chat.completions.create(
-             messages=messages_openai,
-             model=self.model_name,
-             **self.model_config_dict,
-         )
-         return response
-
-     def check_model_config(self):
-         r"""Check whether the model configuration is valid for open-source
-         model backends.
-
-         Raises:
-             ValueError: If the model configuration dictionary contains any
-                 unexpected arguments to OpenAI API, or it does not contain
-                 :obj:`model_path` or :obj:`server_url`.
-         """
-         if (
-             "model_path" not in self.model_config_dict
-             or "server_url" not in self.model_config_dict
-         ):
-             raise ValueError(
-                 "Invalid configuration for open-source model backend with "
-                 ":obj:`model_path` or :obj:`server_url` missing."
-             )
-
-         for param in self.model_config_dict["api_params"]:
-             if param not in OPENAI_API_PARAMS:
-                 raise ValueError(
-                     f"Unexpected argument `{param}` is "
-                     "input into open-source model backend."
-                 )
-
-     @property
-     def stream(self) -> bool:
-         r"""Returns whether the model is in stream mode,
-         which sends partial results each time.
-
-         Returns:
-             bool: Whether the model is in stream mode.
-         """
-         return self.model_config_dict.get('stream', False)
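The removed backend expected a nested configuration carrying model_path, server_url, and the OpenAI-style api_params, as the constructor and check_model_config above show. A minimal sketch of driving it under 0.2.3a1 follows; the ModelType member, model path, and server URL are placeholders/assumptions, and the backend is gone in 0.2.4, where camel/models/openai_compatible_model.py takes over this role.

# Sketch only, valid for 0.2.3a1: drives the removed OpenSourceModel against an
# OpenAI-compatible server. The model type, path, and URL are placeholders.
from camel.models.open_source_model import OpenSourceModel
from camel.types import ModelType

model = OpenSourceModel(
    model_type=ModelType.LLAMA_2,  # assumed: an open-source ModelType member
    model_config_dict={
        "model_path": "meta-llama/Llama-2-7b-chat-hf",  # placeholder path
        "server_url": "http://localhost:8000/v1",       # placeholder URL
        "api_params": {"temperature": 0.4, "stream": False},
    },
)

# `run()` forwards the OpenAI-format history plus api_params to the server.
response = model.run(
    [{"role": "user", "content": "Say hello in one sentence."}]
)
print(response.choices[0].message.content)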