camel-ai 0.2.3__py3-none-any.whl → 0.2.3a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic; see the registry's advisory page for details.

Files changed (87):
  1. camel/__init__.py +1 -1
  2. camel/agents/chat_agent.py +69 -93
  3. camel/agents/knowledge_graph_agent.py +6 -4
  4. camel/bots/__init__.py +2 -16
  5. camel/bots/discord_bot.py +206 -0
  6. camel/configs/__init__.py +2 -1
  7. camel/configs/anthropic_config.py +5 -2
  8. camel/configs/base_config.py +6 -6
  9. camel/configs/groq_config.py +3 -2
  10. camel/configs/ollama_config.py +2 -1
  11. camel/configs/openai_config.py +23 -2
  12. camel/configs/samba_config.py +2 -2
  13. camel/configs/togetherai_config.py +1 -1
  14. camel/configs/vllm_config.py +1 -1
  15. camel/configs/zhipuai_config.py +3 -2
  16. camel/embeddings/openai_embedding.py +2 -2
  17. camel/loaders/__init__.py +0 -2
  18. camel/loaders/firecrawl_reader.py +3 -3
  19. camel/loaders/unstructured_io.py +33 -35
  20. camel/messages/__init__.py +0 -1
  21. camel/models/__init__.py +4 -2
  22. camel/models/anthropic_model.py +26 -32
  23. camel/models/azure_openai_model.py +36 -39
  24. camel/models/base_model.py +20 -31
  25. camel/models/gemini_model.py +29 -37
  26. camel/models/groq_model.py +23 -29
  27. camel/models/litellm_model.py +61 -44
  28. camel/models/mistral_model.py +29 -32
  29. camel/models/model_factory.py +76 -66
  30. camel/models/nemotron_model.py +23 -33
  31. camel/models/ollama_model.py +47 -42
  32. camel/models/open_source_model.py +170 -0
  33. camel/models/{openai_compatible_model.py → openai_compatibility_model.py} +49 -31
  34. camel/models/openai_model.py +29 -48
  35. camel/models/reka_model.py +28 -30
  36. camel/models/samba_model.py +177 -82
  37. camel/models/stub_model.py +2 -2
  38. camel/models/togetherai_model.py +43 -37
  39. camel/models/vllm_model.py +50 -43
  40. camel/models/zhipuai_model.py +27 -33
  41. camel/retrievers/auto_retriever.py +10 -28
  42. camel/retrievers/vector_retriever.py +47 -58
  43. camel/societies/babyagi_playing.py +3 -6
  44. camel/societies/role_playing.py +3 -5
  45. camel/storages/graph_storages/graph_element.py +5 -3
  46. camel/storages/key_value_storages/json.py +1 -6
  47. camel/toolkits/__init__.py +7 -20
  48. camel/toolkits/base.py +3 -2
  49. camel/toolkits/code_execution.py +7 -6
  50. camel/toolkits/dalle_toolkit.py +6 -6
  51. camel/toolkits/github_toolkit.py +10 -9
  52. camel/toolkits/google_maps_toolkit.py +7 -7
  53. camel/toolkits/linkedin_toolkit.py +7 -7
  54. camel/toolkits/math_toolkit.py +8 -8
  55. camel/toolkits/open_api_toolkit.py +5 -5
  56. camel/toolkits/{function_tool.py → openai_function.py} +11 -34
  57. camel/toolkits/reddit_toolkit.py +7 -7
  58. camel/toolkits/retrieval_toolkit.py +5 -5
  59. camel/toolkits/search_toolkit.py +9 -9
  60. camel/toolkits/slack_toolkit.py +11 -11
  61. camel/toolkits/twitter_toolkit.py +452 -378
  62. camel/toolkits/weather_toolkit.py +6 -6
  63. camel/types/__init__.py +1 -6
  64. camel/types/enums.py +85 -40
  65. camel/types/openai_types.py +0 -3
  66. camel/utils/__init__.py +2 -0
  67. camel/utils/async_func.py +7 -7
  68. camel/utils/commons.py +3 -32
  69. camel/utils/token_counting.py +212 -30
  70. camel/workforce/role_playing_worker.py +1 -1
  71. camel/workforce/single_agent_worker.py +1 -1
  72. camel/workforce/task_channel.py +3 -4
  73. camel/workforce/workforce.py +4 -4
  74. {camel_ai-0.2.3.dist-info → camel_ai-0.2.3a1.dist-info}/METADATA +56 -27
  75. {camel_ai-0.2.3.dist-info → camel_ai-0.2.3a1.dist-info}/RECORD +76 -85
  76. {camel_ai-0.2.3.dist-info → camel_ai-0.2.3a1.dist-info}/WHEEL +1 -1
  77. camel/bots/discord_app.py +0 -138
  78. camel/bots/slack/__init__.py +0 -30
  79. camel/bots/slack/models.py +0 -158
  80. camel/bots/slack/slack_app.py +0 -255
  81. camel/loaders/chunkr_reader.py +0 -163
  82. camel/toolkits/arxiv_toolkit.py +0 -155
  83. camel/toolkits/ask_news_toolkit.py +0 -653
  84. camel/toolkits/google_scholar_toolkit.py +0 -146
  85. camel/toolkits/whatsapp_toolkit.py +0 -177
  86. camel/types/unified_model_type.py +0 -104
  87. camel_ai-0.2.3.dist-info/LICENSE +0 -201
@@ -18,54 +18,59 @@ from typing import Any, Dict, List, Optional, Union
18
18
  from openai import OpenAI, Stream
19
19
 
20
20
  from camel.messages import OpenAIMessage
21
- from camel.models import BaseModelBackend
22
- from camel.types import (
23
- ChatCompletion,
24
- ChatCompletionChunk,
25
- ModelType,
26
- )
21
+ from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
27
22
  from camel.utils import (
28
23
  BaseTokenCounter,
29
24
  OpenAITokenCounter,
30
25
  )
31
26
 
32
27
 
33
- class OpenAICompatibleModel(BaseModelBackend):
34
- r"""Constructor for model backend supporting OpenAI compatibility.
35
-
36
- Args:
37
- model_type (Union[ModelType, str]): Model for which a backend is
38
- created.
39
- model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
40
- that will be fed into:obj:`openai.ChatCompletion.create()`. If
41
- :obj:`None`, :obj:`{}` will be used. (default: :obj:`None`)
42
- api_key (str): The API key for authenticating with the model service.
43
- url (str): The url to the model service.
44
- token_counter (Optional[BaseTokenCounter], optional): Token counter to
45
- use for the model. If not provided, :obj:`OpenAITokenCounter(
46
- ModelType.GPT_4O_MINI)` will be used.
47
- (default: :obj:`None`)
48
- """
28
+ class OpenAICompatibilityModel:
29
+ r"""LLM API served by OpenAI-compatible providers."""
49
30
 
50
31
  def __init__(
51
32
  self,
52
- model_type: Union[ModelType, str],
53
- model_config_dict: Optional[Dict[str, Any]] = None,
33
+ model_type: str,
34
+ model_config_dict: Dict[str, Any],
54
35
  api_key: Optional[str] = None,
55
36
  url: Optional[str] = None,
56
37
  token_counter: Optional[BaseTokenCounter] = None,
57
38
  ) -> None:
58
- self.api_key = api_key or os.environ.get("OPENAI_COMPATIBILIY_API_KEY")
59
- self.url = url or os.environ.get("OPENAI_COMPATIBILIY_API_BASE_URL")
60
- super().__init__(
61
- model_type, model_config_dict, api_key, url, token_counter
39
+ r"""Constructor for model backend.
40
+
41
+ Args:
42
+ model_type (str): Model for which a backend is created.
43
+ model_config_dict (Dict[str, Any]): A dictionary that will
44
+ be fed into openai.ChatCompletion.create().
45
+ api_key (str): The API key for authenticating with the
46
+ model service. (default: :obj:`None`)
47
+ url (str): The url to the model service. (default:
48
+ :obj:`None`)
49
+ token_counter (Optional[BaseTokenCounter]): Token counter to use
50
+ for the model. If not provided, `OpenAITokenCounter(ModelType.
51
+ GPT_4O_MINI)` will be used.
52
+ """
53
+ self.model_type = model_type
54
+ self.model_config_dict = model_config_dict
55
+ self._url = url or os.environ.get("OPENAI_COMPATIBILIY_API_BASE_URL")
56
+ self._api_key = api_key or os.environ.get(
57
+ "OPENAI_COMPATIBILIY_API_KEY"
62
58
  )
59
+ if self._url is None:
60
+ raise ValueError(
61
+ "For OpenAI-compatible models, you must provide the `url`."
62
+ )
63
+ if self._api_key is None:
64
+ raise ValueError(
65
+ "For OpenAI-compatible models, you must provide the `api_key`."
66
+ )
63
67
  self._client = OpenAI(
64
68
  timeout=60,
65
69
  max_retries=3,
66
- api_key=self._api_key,
67
70
  base_url=self._url,
71
+ api_key=self._api_key,
68
72
  )
73
+ self._token_counter = token_counter
69
74
 
70
75
  def run(
71
76
  self,
@@ -112,5 +117,18 @@ class OpenAICompatibleModel(BaseModelBackend):
112
117
  """
113
118
  return self.model_config_dict.get('stream', False)
114
119
 
115
- def check_model_config(self):
116
- pass
120
+ @property
121
+ def token_limit(self) -> int:
122
+ r"""Returns the maximum token limit for the given model.
123
+
124
+ Returns:
125
+ int: The maximum token limit for the given model.
126
+ """
127
+ max_tokens = self.model_config_dict.get("max_tokens")
128
+ if isinstance(max_tokens, int):
129
+ return max_tokens
130
+ print(
131
+ "Must set `max_tokens` as an integer in `model_config_dict` when"
132
+ " setting up the model. Using 4096 as default value."
133
+ )
134
+ return 4096
@@ -12,19 +12,14 @@
12
12
  # limitations under the License.
13
13
  # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
14
  import os
15
- import warnings
16
15
  from typing import Any, Dict, List, Optional, Union
17
16
 
18
17
  from openai import OpenAI, Stream
19
18
 
20
- from camel.configs import OPENAI_API_PARAMS, ChatGPTConfig
19
+ from camel.configs import OPENAI_API_PARAMS
21
20
  from camel.messages import OpenAIMessage
22
21
  from camel.models import BaseModelBackend
23
- from camel.types import (
24
- ChatCompletion,
25
- ChatCompletionChunk,
26
- ModelType,
27
- )
22
+ from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
28
23
  from camel.utils import (
29
24
  BaseTokenCounter,
30
25
  OpenAITokenCounter,
@@ -33,39 +28,36 @@ from camel.utils import (
33
28
 
34
29
 
35
30
  class OpenAIModel(BaseModelBackend):
36
- r"""OpenAI API in a unified BaseModelBackend interface.
37
-
38
- Args:
39
- model_type (Union[ModelType, str]): Model for which a backend is
40
- created, one of GPT_* series.
41
- model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
42
- that will be fed into:obj:`openai.ChatCompletion.create()`. If
43
- :obj:`None`, :obj:`ChatGPTConfig().as_dict()` will be used.
44
- (default: :obj:`None`)
45
- api_key (Optional[str], optional): The API key for authenticating
46
- with the OpenAI service. (default: :obj:`None`)
47
- url (Optional[str], optional): The url to the OpenAI service.
48
- (default: :obj:`None`)
49
- token_counter (Optional[BaseTokenCounter], optional): Token counter to
50
- use for the model. If not provided, :obj:`OpenAITokenCounter` will
51
- be used. (default: :obj:`None`)
52
- """
31
+ r"""OpenAI API in a unified BaseModelBackend interface."""
53
32
 
54
33
  def __init__(
55
34
  self,
56
- model_type: Union[ModelType, str],
57
- model_config_dict: Optional[Dict[str, Any]] = None,
35
+ model_type: ModelType,
36
+ model_config_dict: Dict[str, Any],
58
37
  api_key: Optional[str] = None,
59
38
  url: Optional[str] = None,
60
39
  token_counter: Optional[BaseTokenCounter] = None,
61
40
  ) -> None:
62
- if model_config_dict is None:
63
- model_config_dict = ChatGPTConfig().as_dict()
64
- api_key = api_key or os.environ.get("OPENAI_API_KEY")
65
- url = url or os.environ.get("OPENAI_API_BASE_URL")
41
+ r"""Constructor for OpenAI backend.
42
+
43
+ Args:
44
+ model_type (ModelType): Model for which a backend is created,
45
+ one of GPT_* series.
46
+ model_config_dict (Dict[str, Any]): A dictionary that will
47
+ be fed into openai.ChatCompletion.create().
48
+ api_key (Optional[str]): The API key for authenticating with the
49
+ OpenAI service. (default: :obj:`None`)
50
+ url (Optional[str]): The url to the OpenAI service. (default:
51
+ :obj:`None`)
52
+ token_counter (Optional[BaseTokenCounter]): Token counter to use
53
+ for the model. If not provided, `OpenAITokenCounter` will
54
+ be used.
55
+ """
66
56
  super().__init__(
67
57
  model_type, model_config_dict, api_key, url, token_counter
68
58
  )
59
+ self._url = url or os.environ.get("OPENAI_API_BASE_URL")
60
+ self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
69
61
  self._client = OpenAI(
70
62
  timeout=60,
71
63
  max_retries=3,
@@ -104,23 +96,13 @@ class OpenAIModel(BaseModelBackend):
104
96
  # o1-preview and o1-mini have Beta limitations
105
97
  # reference: https://platform.openai.com/docs/guides/reasoning
106
98
  if self.model_type in [ModelType.O1_MINI, ModelType.O1_PREVIEW]:
107
- warnings.warn(
108
- "Warning: You are using an O1 model (O1_MINI or O1_PREVIEW), "
109
- "which has certain limitations, reference: "
110
- "`https://platform.openai.com/docs/guides/reasoning`.",
111
- UserWarning,
112
- )
113
-
114
99
  # Remove system message that is not supported in o1 model.
115
100
  messages = [msg for msg in messages if msg.get("role") != "system"]
116
101
 
117
- # Check and remove unsupported parameters and reset the fixed
118
- # parameters
119
- unsupported_keys = ["stream", "tools", "tool_choice"]
120
- for key in unsupported_keys:
121
- if key in self.model_config_dict:
122
- del self.model_config_dict[key]
123
-
102
+ # Remove unsupported parameters and reset the fixed parameters
103
+ del self.model_config_dict["stream"]
104
+ del self.model_config_dict["tools"]
105
+ del self.model_config_dict["tool_choice"]
124
106
  self.model_config_dict["temperature"] = 1.0
125
107
  self.model_config_dict["top_p"] = 1.0
126
108
  self.model_config_dict["n"] = 1.0
@@ -129,7 +111,7 @@ class OpenAIModel(BaseModelBackend):
129
111
 
130
112
  response = self._client.chat.completions.create(
131
113
  messages=messages,
132
- model=self.model_type,
114
+ model=self.model_type.value,
133
115
  **self.model_config_dict,
134
116
  )
135
117
  return response
@@ -151,9 +133,8 @@ class OpenAIModel(BaseModelBackend):
151
133
 
152
134
  @property
153
135
  def stream(self) -> bool:
154
- r"""Returns whether the model is in stream mode, which sends partial
155
- results each time.
156
-
136
+ r"""Returns whether the model is in stream mode,
137
+ which sends partial results each time.
157
138
  Returns:
158
139
  bool: Whether the model is in stream mode.
159
140
  """
@@ -11,9 +11,10 @@
11
11
  # See the License for the specific language governing permissions and
12
12
  # limitations under the License.
13
13
  # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
- from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
14
+ import os
15
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional
15
16
 
16
- from camel.configs import REKA_API_PARAMS, RekaConfig
17
+ from camel.configs import REKA_API_PARAMS
17
18
  from camel.messages import OpenAIMessage
18
19
  from camel.models import BaseModelBackend
19
20
  from camel.types import ChatCompletion, ModelType
@@ -21,7 +22,6 @@ from camel.utils import (
21
22
  BaseTokenCounter,
22
23
  OpenAITokenCounter,
23
24
  api_keys_required,
24
- dependencies_required,
25
25
  )
26
26
 
27
27
  if TYPE_CHECKING:
@@ -39,42 +39,40 @@ except (ImportError, AttributeError):
39
39
 
40
40
 
41
41
  class RekaModel(BaseModelBackend):
42
- r"""Reka API in a unified BaseModelBackend interface.
43
-
44
- Args:
45
- model_type (Union[ModelType, str]): Model for which a backend is
46
- created, one of REKA_* series.
47
- model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
48
- that will be fed into:obj:`Reka.chat.create()`. If :obj:`None`,
49
- :obj:`RekaConfig().as_dict()` will be used. (default: :obj:`None`)
50
- api_key (Optional[str], optional): The API key for authenticating with
51
- the Reka service. (default: :obj:`None`)
52
- url (Optional[str], optional): The url to the Reka service.
53
- (default: :obj:`None`)
54
- token_counter (Optional[BaseTokenCounter], optional): Token counter to
55
- use for the model. If not provided, :obj:`OpenAITokenCounter` will
56
- be used. (default: :obj:`None`)
57
- """
58
-
59
- @dependencies_required('reka')
42
+ r"""Reka API in a unified BaseModelBackend interface."""
43
+
60
44
  def __init__(
61
45
  self,
62
- model_type: Union[ModelType, str],
63
- model_config_dict: Optional[Dict[str, Any]] = None,
46
+ model_type: ModelType,
47
+ model_config_dict: Dict[str, Any],
64
48
  api_key: Optional[str] = None,
65
49
  url: Optional[str] = None,
66
50
  token_counter: Optional[BaseTokenCounter] = None,
67
51
  ) -> None:
68
- from reka.client import Reka
52
+ r"""Constructor for Reka backend.
69
53
 
70
- if model_config_dict is None:
71
- model_config_dict = RekaConfig().as_dict()
72
- api_key = api_key or os.environ.get("REKA_API_KEY")
73
- url = url or os.environ.get("REKA_API_BASE_URL")
54
+ Args:
55
+ model_type (ModelType): Model for which a backend is created,
56
+ one of REKA_* series.
57
+ model_config_dict (Dict[str, Any]): A dictionary that will
58
+ be fed into `Reka.chat.create`.
59
+ api_key (Optional[str]): The API key for authenticating with the
60
+ Reka service. (default: :obj:`None`)
61
+ url (Optional[str]): The url to the Reka service.
62
+ token_counter (Optional[BaseTokenCounter]): Token counter to use
63
+ for the model. If not provided, `OpenAITokenCounter` will be
64
+ used.
65
+ """
74
66
  super().__init__(
75
67
  model_type, model_config_dict, api_key, url, token_counter
76
68
  )
69
+ self._api_key = api_key or os.environ.get("REKA_API_KEY")
70
+ self._url = url or os.environ.get("REKA_SERVER_URL")
71
+
72
+ from reka.client import Reka
73
+
77
74
  self._client = Reka(api_key=self._api_key, base_url=self._url)
75
+ self._token_counter: Optional[BaseTokenCounter] = None
78
76
 
79
77
  def _convert_reka_to_openai_response(
80
78
  self, response: 'ChatResponse'
@@ -186,7 +184,7 @@ class RekaModel(BaseModelBackend):
186
184
 
187
185
  response = self._client.chat.create(
188
186
  messages=reka_messages,
189
- model=self.model_type,
187
+ model=self.model_type.value,
190
188
  **self.model_config_dict,
191
189
  )
192
190
 
@@ -202,7 +200,7 @@ class RekaModel(BaseModelBackend):
202
200
  prompt_tokens=openai_response.usage.input_tokens, # type: ignore[union-attr]
203
201
  completion=openai_response.choices[0].message.content,
204
202
  completion_tokens=openai_response.usage.output_tokens, # type: ignore[union-attr]
205
- model=self.model_type,
203
+ model=self.model_type.value,
206
204
  )
207
205
  record(llm_event)
208
206