camel-ai 0.2.0__py3-none-any.whl → 0.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic. Click here for more details.

Files changed (102)
  1. camel/__init__.py +1 -1
  2. camel/agents/chat_agent.py +326 -115
  3. camel/agents/knowledge_graph_agent.py +4 -6
  4. camel/bots/__init__.py +34 -0
  5. camel/bots/discord_app.py +138 -0
  6. camel/bots/slack/__init__.py +30 -0
  7. camel/bots/slack/models.py +158 -0
  8. camel/bots/slack/slack_app.py +255 -0
  9. camel/bots/telegram_bot.py +82 -0
  10. camel/configs/__init__.py +1 -2
  11. camel/configs/anthropic_config.py +2 -5
  12. camel/configs/base_config.py +6 -6
  13. camel/configs/gemini_config.py +1 -1
  14. camel/configs/groq_config.py +2 -3
  15. camel/configs/ollama_config.py +1 -2
  16. camel/configs/openai_config.py +2 -23
  17. camel/configs/samba_config.py +2 -2
  18. camel/configs/togetherai_config.py +1 -1
  19. camel/configs/vllm_config.py +1 -1
  20. camel/configs/zhipuai_config.py +2 -3
  21. camel/embeddings/openai_embedding.py +2 -2
  22. camel/loaders/__init__.py +2 -0
  23. camel/loaders/chunkr_reader.py +163 -0
  24. camel/loaders/firecrawl_reader.py +13 -45
  25. camel/loaders/unstructured_io.py +65 -29
  26. camel/messages/__init__.py +1 -0
  27. camel/messages/func_message.py +2 -2
  28. camel/models/__init__.py +2 -4
  29. camel/models/anthropic_model.py +32 -26
  30. camel/models/azure_openai_model.py +39 -36
  31. camel/models/base_model.py +31 -20
  32. camel/models/gemini_model.py +37 -29
  33. camel/models/groq_model.py +29 -23
  34. camel/models/litellm_model.py +44 -61
  35. camel/models/mistral_model.py +33 -30
  36. camel/models/model_factory.py +66 -76
  37. camel/models/nemotron_model.py +33 -23
  38. camel/models/ollama_model.py +42 -47
  39. camel/models/{openai_compatibility_model.py → openai_compatible_model.py} +36 -41
  40. camel/models/openai_model.py +48 -29
  41. camel/models/reka_model.py +30 -28
  42. camel/models/samba_model.py +82 -177
  43. camel/models/stub_model.py +2 -2
  44. camel/models/togetherai_model.py +37 -43
  45. camel/models/vllm_model.py +43 -50
  46. camel/models/zhipuai_model.py +33 -27
  47. camel/retrievers/auto_retriever.py +28 -10
  48. camel/retrievers/vector_retriever.py +72 -44
  49. camel/societies/babyagi_playing.py +6 -3
  50. camel/societies/role_playing.py +17 -3
  51. camel/storages/__init__.py +2 -0
  52. camel/storages/graph_storages/__init__.py +2 -0
  53. camel/storages/graph_storages/graph_element.py +3 -5
  54. camel/storages/graph_storages/nebula_graph.py +547 -0
  55. camel/storages/key_value_storages/json.py +6 -1
  56. camel/tasks/task.py +11 -4
  57. camel/tasks/task_prompt.py +4 -0
  58. camel/toolkits/__init__.py +20 -7
  59. camel/toolkits/arxiv_toolkit.py +155 -0
  60. camel/toolkits/ask_news_toolkit.py +653 -0
  61. camel/toolkits/base.py +2 -3
  62. camel/toolkits/code_execution.py +6 -7
  63. camel/toolkits/dalle_toolkit.py +6 -6
  64. camel/toolkits/{openai_function.py → function_tool.py} +34 -11
  65. camel/toolkits/github_toolkit.py +9 -10
  66. camel/toolkits/google_maps_toolkit.py +7 -7
  67. camel/toolkits/google_scholar_toolkit.py +146 -0
  68. camel/toolkits/linkedin_toolkit.py +7 -7
  69. camel/toolkits/math_toolkit.py +8 -8
  70. camel/toolkits/open_api_toolkit.py +5 -5
  71. camel/toolkits/reddit_toolkit.py +7 -7
  72. camel/toolkits/retrieval_toolkit.py +5 -5
  73. camel/toolkits/search_toolkit.py +9 -9
  74. camel/toolkits/slack_toolkit.py +11 -11
  75. camel/toolkits/twitter_toolkit.py +378 -452
  76. camel/toolkits/weather_toolkit.py +6 -6
  77. camel/toolkits/whatsapp_toolkit.py +177 -0
  78. camel/types/__init__.py +6 -1
  79. camel/types/enums.py +43 -85
  80. camel/types/openai_types.py +3 -0
  81. camel/types/unified_model_type.py +104 -0
  82. camel/utils/__init__.py +0 -2
  83. camel/utils/async_func.py +7 -7
  84. camel/utils/commons.py +40 -4
  85. camel/utils/token_counting.py +30 -212
  86. camel/workforce/__init__.py +6 -6
  87. camel/workforce/base.py +9 -5
  88. camel/workforce/prompts.py +179 -0
  89. camel/workforce/role_playing_worker.py +181 -0
  90. camel/workforce/{single_agent_node.py → single_agent_worker.py} +49 -23
  91. camel/workforce/task_channel.py +7 -8
  92. camel/workforce/utils.py +20 -50
  93. camel/workforce/{worker_node.py → worker.py} +15 -12
  94. camel/workforce/workforce.py +456 -19
  95. camel_ai-0.2.3.dist-info/LICENSE +201 -0
  96. {camel_ai-0.2.0.dist-info → camel_ai-0.2.3.dist-info}/METADATA +39 -65
  97. {camel_ai-0.2.0.dist-info → camel_ai-0.2.3.dist-info}/RECORD +98 -86
  98. {camel_ai-0.2.0.dist-info → camel_ai-0.2.3.dist-info}/WHEEL +1 -1
  99. camel/models/open_source_model.py +0 -170
  100. camel/workforce/manager_node.py +0 -299
  101. camel/workforce/role_playing_node.py +0 -168
  102. camel/workforce/workforce_prompt.py +0 -125
@@ -14,6 +14,8 @@
14
14
  import uuid
15
15
  import warnings
16
16
  from typing import (
17
+ IO,
18
+ TYPE_CHECKING,
17
19
  Any,
18
20
  Dict,
19
21
  List,
@@ -23,7 +25,8 @@ from typing import (
23
25
  Union,
24
26
  )
25
27
 
26
- from unstructured.documents.elements import Element
28
+ if TYPE_CHECKING:
29
+ from unstructured.documents.elements import Element
27
30
 
28
31
 
29
32
  class UnstructuredIO:
@@ -39,33 +42,34 @@ class UnstructuredIO:
39
42
  @staticmethod
40
43
  def create_element_from_text(
41
44
  text: str,
42
- element_id: Optional[Union[str, uuid.UUID]] = None,
45
+ element_id: Optional[str] = None,
43
46
  embeddings: Optional[List[float]] = None,
44
47
  filename: Optional[str] = None,
45
48
  file_directory: Optional[str] = None,
46
49
  last_modified: Optional[str] = None,
47
50
  filetype: Optional[str] = None,
48
- parent_id: Optional[Union[str, uuid.UUID]] = None,
49
- ) -> Element:
51
+ parent_id: Optional[str] = None,
52
+ ) -> "Element":
50
53
  r"""Creates a Text element from a given text input, with optional
51
54
  metadata and embeddings.
52
55
 
53
56
  Args:
54
57
  text (str): The text content for the element.
55
- element_id (Optional[Union[str, uuid.UUID]], optional): Unique
56
- identifier for the element. Defaults to `None`.
57
- embeddings (Optional[List[float]], optional): A list of float
58
- numbers representing the text embeddings. Defaults to `None`.
58
+ element_id (Optional[str], optional): Unique identifier for the
59
+ element. (default: :obj:`None`)
60
+ embeddings (List[float], optional): A list of float
61
+ numbers representing the text embeddings.
62
+ (default: :obj:`None`)
59
63
  filename (Optional[str], optional): The name of the file the
60
- element is associated with. Defaults to `None`.
64
+ element is associated with. (default: :obj:`None`)
61
65
  file_directory (Optional[str], optional): The directory path where
62
- the file is located. Defaults to `None`.
66
+ the file is located. (default: :obj:`None`)
63
67
  last_modified (Optional[str], optional): The last modified date of
64
- the file. Defaults to `None`.
65
- filetype (Optional[str], optional): The type of the file. Defaults
66
- to `None`.
67
- parent_id (Optional[Union[str, uuid.UUID]], optional): The
68
- identifier of the parent element. Defaults to `None`.
68
+ the file. (default: :obj:`None`)
69
+ filetype (Optional[str], optional): The type of the file.
70
+ (default: :obj:`None`)
71
+ parent_id (Optional[str], optional): The identifier of the parent
72
+ element. (default: :obj:`None`)
69
73
 
70
74
  Returns:
71
75
  Element: An instance of Text with the provided content and
@@ -83,7 +87,7 @@ class UnstructuredIO:
83
87
 
84
88
  return Text(
85
89
  text=text,
86
- element_id=element_id or uuid.uuid4(),
90
+ element_id=element_id or str(uuid.uuid4()),
87
91
  metadata=metadata,
88
92
  embeddings=embeddings,
89
93
  )
@@ -92,7 +96,7 @@ class UnstructuredIO:
92
96
  def parse_file_or_url(
93
97
  input_path: str,
94
98
  **kwargs: Any,
95
- ) -> Union[List[Element], None]:
99
+ ) -> Union[List["Element"], None]:
96
100
  r"""Loads a file or a URL and parses its contents into elements.
97
101
 
98
102
  Args:
@@ -108,7 +112,7 @@ class UnstructuredIO:
108
112
  specified.
109
113
 
110
114
  Notes:
111
- Available document types:
115
+ Supported file types:
112
116
  "csv", "doc", "docx", "epub", "image", "md", "msg", "odt",
113
117
  "org", "pdf", "ppt", "pptx", "rtf", "rst", "tsv", "xlsx".
114
118
 
@@ -118,25 +122,23 @@ class UnstructuredIO:
118
122
  import os
119
123
  from urllib.parse import urlparse
120
124
 
125
+ from unstructured.partition.auto import partition
126
+
121
127
  # Check if the input is a URL
122
128
  parsed_url = urlparse(input_path)
123
129
  is_url = all([parsed_url.scheme, parsed_url.netloc])
124
130
 
131
+ # Handling URL
125
132
  if is_url:
126
- # Handling URL
127
- from unstructured.partition.html import partition_html
128
-
129
133
  try:
130
- elements = partition_html(url=input_path, **kwargs)
134
+ elements = partition(url=input_path, **kwargs)
131
135
  return elements
132
136
  except Exception:
133
137
  warnings.warn(f"Failed to parse the URL: {input_path}")
134
138
  return None
135
139
 
140
+ # Handling file
136
141
  else:
137
- # Handling file
138
- from unstructured.partition.auto import partition
139
-
140
142
  # Check if the file exists
141
143
  if not os.path.exists(input_path):
142
144
  raise FileNotFoundError(
@@ -152,6 +154,39 @@ class UnstructuredIO:
152
154
  warnings.warn(f"Failed to partition the file: {input_path}")
153
155
  return None
154
156
 
157
+ @staticmethod
158
+ def parse_bytes(
159
+ file: IO[bytes], **kwargs: Any
160
+ ) -> Union[List["Element"], None]:
161
+ r"""Parses a bytes stream and converts its contents into elements.
162
+
163
+ Args:
164
+ file (IO[bytes]): The file in bytes format to be parsed.
165
+ **kwargs: Extra kwargs passed to the partition function.
166
+
167
+ Returns:
168
+ Union[List[Element], None]: List of elements after parsing the file
169
+ if successful, otherwise `None`.
170
+
171
+ Notes:
172
+ Supported file types:
173
+ "csv", "doc", "docx", "epub", "image", "md", "msg", "odt",
174
+ "org", "pdf", "ppt", "pptx", "rtf", "rst", "tsv", "xlsx".
175
+
176
+ References:
177
+ https://docs.unstructured.io/open-source/core-functionality/partitioning
178
+ """
179
+
180
+ from unstructured.partition.auto import partition
181
+
182
+ try:
183
+ # Use partition to process the bytes stream
184
+ elements = partition(file=file, **kwargs)
185
+ return elements
186
+ except Exception as e:
187
+ warnings.warn(f"Failed to partition the file stream: {e}")
188
+ return None
189
+
155
190
  @staticmethod
156
191
  def clean_text_data(
157
192
  text: str,
@@ -162,7 +197,7 @@ class UnstructuredIO:
162
197
 
163
198
  This function applies multiple text cleaning utilities by calling the
164
199
  `unstructured` library's cleaning bricks for operations like
165
- replacing unicode quotes, removing extra whitespace, dashes, non-ascii
200
+ replacing Unicode quotes, removing extra whitespace, dashes, non-ascii
166
201
  characters, and more.
167
202
 
168
203
  If no cleaning options are provided, a default set of cleaning
@@ -249,7 +284,8 @@ class UnstructuredIO:
249
284
  )
250
285
  else:
251
286
  raise ValueError(
252
- f"'{func_name}' is not a valid function in `unstructured`."
287
+ f"'{func_name}' is not a valid function in "
288
+ "`Unstructured IO`."
253
289
  )
254
290
 
255
291
  return cleaned_text
@@ -406,8 +442,8 @@ class UnstructuredIO:
406
442
 
407
443
  @staticmethod
408
444
  def chunk_elements(
409
- elements: List[Any], chunk_type: str, **kwargs
410
- ) -> List[Element]:
445
+ elements: List["Element"], chunk_type: str, **kwargs
446
+ ) -> List["Element"]:
411
447
  r"""Chunks elements by titles.
412
448
 
413
449
  Args:
@@ -32,6 +32,7 @@ __all__ = [
32
32
  'OpenAISystemMessage',
33
33
  'OpenAIAssistantMessage',
34
34
  'OpenAIUserMessage',
35
+ 'OpenAIFunctionMessage',
35
36
  'OpenAIMessage',
36
37
  'BaseMessage',
37
38
  'FunctionCallingMessage',
@@ -93,10 +93,10 @@ class FunctionCallingMessage(BaseMessage):
93
93
  OpenAIMessage: The converted :obj:`OpenAIMessage` object
94
94
  with its role being "function".
95
95
  """
96
- if (not self.func_name) or (not self.result):
96
+ if not self.func_name:
97
97
  raise ValueError(
98
98
  "Invalid request for converting into function message"
99
- " due to missing function name or results."
99
+ " due to missing function name."
100
100
  )
101
101
 
102
102
  result_content = {"result": {str(self.result)}}
camel/models/__init__.py CHANGED
@@ -21,9 +21,8 @@ from .mistral_model import MistralModel
21
21
  from .model_factory import ModelFactory
22
22
  from .nemotron_model import NemotronModel
23
23
  from .ollama_model import OllamaModel
24
- from .open_source_model import OpenSourceModel
25
24
  from .openai_audio_models import OpenAIAudioModels
26
- from .openai_compatibility_model import OpenAICompatibilityModel
25
+ from .openai_compatible_model import OpenAICompatibleModel
27
26
  from .openai_model import OpenAIModel
28
27
  from .reka_model import RekaModel
29
28
  from .samba_model import SambaModel
@@ -41,7 +40,6 @@ __all__ = [
41
40
  'GroqModel',
42
41
  'StubModel',
43
42
  'ZhipuAIModel',
44
- 'OpenSourceModel',
45
43
  'ModelFactory',
46
44
  'LiteLLMModel',
47
45
  'OpenAIAudioModels',
@@ -49,7 +47,7 @@ __all__ = [
49
47
  'OllamaModel',
50
48
  'VLLMModel',
51
49
  'GeminiModel',
52
- 'OpenAICompatibilityModel',
50
+ 'OpenAICompatibleModel',
53
51
  'RekaModel',
54
52
  'SambaModel',
55
53
  'TogetherAIModel',
@@ -12,11 +12,9 @@
12
12
  # limitations under the License.
13
13
  # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
14
  import os
15
- from typing import Any, Dict, List, Optional
15
+ from typing import Any, Dict, List, Optional, Union
16
16
 
17
- from anthropic import NOT_GIVEN, Anthropic
18
-
19
- from camel.configs import ANTHROPIC_API_PARAMS
17
+ from camel.configs import ANTHROPIC_API_PARAMS, AnthropicConfig
20
18
  from camel.messages import OpenAIMessage
21
19
  from camel.models.base_model import BaseModelBackend
22
20
  from camel.types import ChatCompletion, ModelType
@@ -24,40 +22,47 @@ from camel.utils import (
24
22
  AnthropicTokenCounter,
25
23
  BaseTokenCounter,
26
24
  api_keys_required,
25
+ dependencies_required,
27
26
  )
28
27
 
29
28
 
30
29
  class AnthropicModel(BaseModelBackend):
31
- r"""Anthropic API in a unified BaseModelBackend interface."""
32
-
30
+ r"""Anthropic API in a unified BaseModelBackend interface.
31
+
32
+ Args:
33
+ model_type (Union[ModelType, str]): Model for which a backend is
34
+ created, one of CLAUDE_* series.
35
+ model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
36
+ that will be fed into Anthropic.messages.create(). If
37
+ :obj:`None`, :obj:`AnthropicConfig().as_dict()` will be used.
38
+ (default: :obj:`None`)
39
+ api_key (Optional[str], optional): The API key for authenticating with
40
+ the Anthropic service. (default: :obj:`None`)
41
+ url (Optional[str], optional): The url to the Anthropic service.
42
+ (default: :obj:`None`)
43
+ token_counter (Optional[BaseTokenCounter], optional): Token counter to
44
+ use for the model. If not provided, :obj:`AnthropicTokenCounter`
45
+ will be used. (default: :obj:`None`)
46
+ """
47
+
48
+ @dependencies_required('anthropic')
33
49
  def __init__(
34
50
  self,
35
- model_type: ModelType,
36
- model_config_dict: Dict[str, Any],
51
+ model_type: Union[ModelType, str],
52
+ model_config_dict: Optional[Dict[str, Any]] = None,
37
53
  api_key: Optional[str] = None,
38
54
  url: Optional[str] = None,
39
55
  token_counter: Optional[BaseTokenCounter] = None,
40
56
  ) -> None:
41
- r"""Constructor for Anthropic backend.
57
+ from anthropic import Anthropic
42
58
 
43
- Args:
44
- model_type (ModelType): Model for which a backend is created,
45
- one of CLAUDE_* series.
46
- model_config_dict (Dict[str, Any]): A dictionary that will
47
- be fed into Anthropic.messages.create().
48
- api_key (Optional[str]): The API key for authenticating with the
49
- Anthropic service. (default: :obj:`None`)
50
- url (Optional[str]): The url to the Anthropic service. (default:
51
- :obj:`None`)
52
- token_counter (Optional[BaseTokenCounter]): Token counter to use
53
- for the model. If not provided, `AnthropicTokenCounter` will
54
- be used.
55
- """
59
+ if model_config_dict is None:
60
+ model_config_dict = AnthropicConfig().as_dict()
61
+ api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")
62
+ url = url or os.environ.get("ANTHROPIC_API_BASE_URL")
56
63
  super().__init__(
57
64
  model_type, model_config_dict, api_key, url, token_counter
58
65
  )
59
- self._api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")
60
- self._url = url or os.environ.get("ANTHROPIC_API_BASE_URL")
61
66
  self.client = Anthropic(api_key=self._api_key, base_url=self._url)
62
67
 
63
68
  def _convert_response_from_anthropic_to_openai(self, response):
@@ -89,7 +94,7 @@ class AnthropicModel(BaseModelBackend):
89
94
  tokenization style.
90
95
  """
91
96
  if not self._token_counter:
92
- self._token_counter = AnthropicTokenCounter(self.model_type)
97
+ self._token_counter = AnthropicTokenCounter()
93
98
  return self._token_counter
94
99
 
95
100
  def count_tokens_from_prompt(self, prompt: str) -> int:
@@ -117,13 +122,14 @@ class AnthropicModel(BaseModelBackend):
117
122
  Returns:
118
123
  ChatCompletion: Response in the OpenAI API format.
119
124
  """
125
+ from anthropic import NOT_GIVEN
120
126
 
121
127
  if messages[0]["role"] == "system":
122
128
  sys_msg = str(messages.pop(0)["content"])
123
129
  else:
124
130
  sys_msg = NOT_GIVEN # type: ignore[assignment]
125
131
  response = self.client.messages.create(
126
- model=self.model_type.value,
132
+ model=self.model_type,
127
133
  system=sys_msg,
128
134
  messages=messages, # type: ignore[arg-type]
129
135
  **self.model_config_dict,
@@ -16,60 +16,65 @@ from typing import Any, Dict, List, Optional, Union
16
16
 
17
17
  from openai import AzureOpenAI, Stream
18
18
 
19
- from camel.configs import OPENAI_API_PARAMS
19
+ from camel.configs import OPENAI_API_PARAMS, ChatGPTConfig
20
20
  from camel.messages import OpenAIMessage
21
21
  from camel.models.base_model import BaseModelBackend
22
- from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
22
+ from camel.types import (
23
+ ChatCompletion,
24
+ ChatCompletionChunk,
25
+ ModelType,
26
+ )
23
27
  from camel.utils import BaseTokenCounter, OpenAITokenCounter, api_keys_required
24
28
 
25
29
 
26
30
  class AzureOpenAIModel(BaseModelBackend):
27
31
  r"""Azure OpenAI API in a unified BaseModelBackend interface.
28
- Doc: https://learn.microsoft.com/en-us/azure/ai-services/openai/
32
+
33
+ Args:
34
+ model_type (Union[ModelType, str]): Model for which a backend is
35
+ created, one of GPT_* series.
36
+ model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
37
+ that will be fed into :obj:`openai.ChatCompletion.create()`. If
38
+ :obj:`None`, :obj:`ChatGPTConfig().as_dict()` will be used.
39
+ (default: :obj:`None`)
40
+ api_key (Optional[str], optional): The API key for authenticating with
41
+ the OpenAI service. (default: :obj:`None`)
42
+ url (Optional[str], optional): The url to the OpenAI service.
43
+ (default: :obj:`None`)
44
+ api_version (Optional[str], optional): The api version for the model.
45
+ (default: :obj:`None`)
46
+ azure_deployment_name (Optional[str], optional): The deployment name
47
+ you chose when you deployed an azure model. (default: :obj:`None`)
48
+ token_counter (Optional[BaseTokenCounter], optional): Token counter to
49
+ use for the model. If not provided, :obj:`OpenAITokenCounter`
50
+ will be used. (default: :obj:`None`)
51
+
52
+ References:
53
+ https://learn.microsoft.com/en-us/azure/ai-services/openai/
29
54
  """
30
55
 
31
56
  def __init__(
32
57
  self,
33
- model_type: ModelType,
34
- model_config_dict: Dict[str, Any],
58
+ model_type: Union[ModelType, str],
59
+ model_config_dict: Optional[Dict[str, Any]] = None,
35
60
  api_key: Optional[str] = None,
36
61
  url: Optional[str] = None,
62
+ token_counter: Optional[BaseTokenCounter] = None,
37
63
  api_version: Optional[str] = None,
38
64
  azure_deployment_name: Optional[str] = None,
39
65
  ) -> None:
40
- r"""Constructor for OpenAI backend.
66
+ if model_config_dict is None:
67
+ model_config_dict = ChatGPTConfig().as_dict()
68
+ api_key = api_key or os.environ.get("AZURE_OPENAI_API_KEY")
69
+ url = url or os.environ.get("AZURE_OPENAI_BASE_URL")
70
+ super().__init__(
71
+ model_type, model_config_dict, api_key, url, token_counter
72
+ )
41
73
 
42
- Args:
43
- model_type (ModelType): Model for which a backend is created,
44
- one of GPT_* series.
45
- model_config_dict (Dict[str, Any]): A dictionary that will
46
- be fed into openai.ChatCompletion.create().
47
- api_key (Optional[str]): The API key for authenticating with the
48
- OpenAI service. (default: :obj:`None`)
49
- url (Optional[str]): The url to the OpenAI service. (default:
50
- :obj:`None`)
51
- api_version (Optional[str]): The api version for the model.
52
- azure_deployment_name (Optional[str]): The deployment name you
53
- chose when you deployed an azure model. (default: :obj:`None`)
54
- """
55
- super().__init__(model_type, model_config_dict, api_key, url)
56
- self._url = url or os.environ.get("AZURE_OPENAI_ENDPOINT")
57
- self._api_key = api_key or os.environ.get("AZURE_OPENAI_API_KEY")
58
74
  self.api_version = api_version or os.environ.get("AZURE_API_VERSION")
59
75
  self.azure_deployment_name = azure_deployment_name or os.environ.get(
60
76
  "AZURE_DEPLOYMENT_NAME"
61
77
  )
62
-
63
- if self._url is None:
64
- raise ValueError(
65
- "Must provide either the `url` argument "
66
- "or `AZURE_OPENAI_ENDPOINT` environment variable."
67
- )
68
- if self._api_key is None:
69
- raise ValueError(
70
- "Must provide either the `api_key` argument "
71
- "or `AZURE_OPENAI_API_KEY` environment variable."
72
- )
73
78
  if self.api_version is None:
74
79
  raise ValueError(
75
80
  "Must provide either the `api_version` argument "
@@ -80,7 +85,6 @@ class AzureOpenAIModel(BaseModelBackend):
80
85
  "Must provide either the `azure_deployment_name` argument "
81
86
  "or `AZURE_DEPLOYMENT_NAME` environment variable."
82
87
  )
83
- self.model = str(self.azure_deployment_name)
84
88
 
85
89
  self._client = AzureOpenAI(
86
90
  azure_endpoint=str(self._url),
@@ -90,7 +94,6 @@ class AzureOpenAIModel(BaseModelBackend):
90
94
  timeout=60,
91
95
  max_retries=3,
92
96
  )
93
- self._token_counter: Optional[BaseTokenCounter] = None
94
97
 
95
98
  @property
96
99
  def token_counter(self) -> BaseTokenCounter:
@@ -122,7 +125,7 @@ class AzureOpenAIModel(BaseModelBackend):
122
125
  """
123
126
  response = self._client.chat.completions.create(
124
127
  messages=messages,
125
- model=self.model,
128
+ model=self.azure_deployment_name, # type:ignore[arg-type]
126
129
  **self.model_config_dict,
127
130
  )
128
131
  return response
@@ -17,41 +17,49 @@ from typing import Any, Dict, List, Optional, Union
17
17
  from openai import Stream
18
18
 
19
19
  from camel.messages import OpenAIMessage
20
- from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
20
+ from camel.types import (
21
+ ChatCompletion,
22
+ ChatCompletionChunk,
23
+ ModelType,
24
+ UnifiedModelType,
25
+ )
21
26
  from camel.utils import BaseTokenCounter
22
27
 
23
28
 
24
29
  class BaseModelBackend(ABC):
25
30
  r"""Base class for different model backends.
26
- May be OpenAI API, a local LLM, a stub for unit tests, etc.
31
+ It may be OpenAI API, a local LLM, a stub for unit tests, etc.
32
+
33
+ Args:
34
+ model_type (Union[ModelType, str]): Model for which a backend is
35
+ created.
36
+ model_config_dict (Optional[Dict[str, Any]], optional): A config
37
+ dictionary. (default: :obj:`{}`)
38
+ api_key (Optional[str], optional): The API key for authenticating
39
+ with the model service. (default: :obj:`None`)
40
+ url (Optional[str], optional): The url to the model service.
41
+ (default: :obj:`None`)
42
+ token_counter (Optional[BaseTokenCounter], optional): Token
43
+ counter to use for the model. If not provided,
44
+ :obj:`OpenAITokenCounter` will be used. (default: :obj:`None`)
27
45
  """
28
46
 
29
47
  def __init__(
30
48
  self,
31
- model_type: ModelType,
32
- model_config_dict: Dict[str, Any],
49
+ model_type: Union[ModelType, str],
50
+ model_config_dict: Optional[Dict[str, Any]] = None,
33
51
  api_key: Optional[str] = None,
34
52
  url: Optional[str] = None,
35
53
  token_counter: Optional[BaseTokenCounter] = None,
36
54
  ) -> None:
37
- r"""Constructor for the model backend.
38
-
39
- Args:
40
- model_type (ModelType): Model for which a backend is created.
41
- model_config_dict (Dict[str, Any]): A config dictionary.
42
- api_key (Optional[str]): The API key for authenticating with the
43
- model service.
44
- url (Optional[str]): The url to the model service.
45
- token_counter (Optional[BaseTokenCounter]): Token counter to use
46
- for the model. If not provided, `OpenAITokenCounter` will
47
- be used.
48
- """
49
- self.model_type = model_type
55
+ self.model_type: UnifiedModelType = UnifiedModelType(model_type)
56
+ if model_config_dict is None:
57
+ model_config_dict = {}
50
58
  self.model_config_dict = model_config_dict
51
59
  self._api_key = api_key
52
60
  self._url = url
53
- self.check_model_config()
54
61
  self._token_counter = token_counter
62
+ self.check_model_config()
55
63
 
56
64
  @property
57
65
  @abstractmethod
@@ -110,6 +118,9 @@ class BaseModelBackend(ABC):
110
118
  def token_limit(self) -> int:
111
119
  r"""Returns the maximum token limit for a given model.
112
120
 
121
+ This method retrieves the maximum token limit either from the
122
+ `model_config_dict` or from the model's default token limit.
123
+
113
124
  Returns:
114
125
  int: The maximum token limit for the given model.
115
126
  """
@@ -120,8 +131,8 @@ class BaseModelBackend(ABC):
120
131
 
121
132
  @property
122
133
  def stream(self) -> bool:
123
- r"""Returns whether the model is in stream mode,
124
- which sends partial results each time.
134
+ r"""Returns whether the model is in stream mode, which sends partial
135
+ results each time.
125
136
 
126
137
  Returns:
127
138
  bool: Whether the model is in stream mode.
@@ -11,9 +11,10 @@
11
11
  # See the License for the specific language governing permissions and
12
12
  # limitations under the License.
13
13
  # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
- from typing import TYPE_CHECKING, Any, Dict, List, Optional
14
+ import os
15
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
15
16
 
16
- from camel.configs import Gemini_API_PARAMS
17
+ from camel.configs import Gemini_API_PARAMS, GeminiConfig
17
18
  from camel.messages import OpenAIMessage
18
19
  from camel.models import BaseModelBackend
19
20
  from camel.types import (
@@ -26,6 +27,7 @@ from camel.utils import (
26
27
  BaseTokenCounter,
27
28
  GeminiTokenCounter,
28
29
  api_keys_required,
30
+ dependencies_required,
29
31
  )
30
32
 
31
33
  if TYPE_CHECKING:
@@ -33,43 +35,49 @@ if TYPE_CHECKING:
33
35
 
34
36
 
35
37
  class GeminiModel(BaseModelBackend):
36
- r"""Gemini API in a unified BaseModelBackend interface."""
37
-
38
- # NOTE: Currently "stream": True is not supported with Gemini due to the
39
- # limitation of the current camel design.
40
-
38
+ r"""Gemini API in a unified BaseModelBackend interface.
39
+
40
+ Args:
41
+ model_type (Union[ModelType, str]): Model for which a backend is
42
+ created.
43
+ model_config_dict (Optional[Dict[str, Any]], optional): A dictionary
44
+ that will be fed into:obj:`genai.GenerativeModel.generate_content()
45
+ `. If:obj:`None`, :obj:`GeminiConfig().as_dict()` will be used.
46
+ (default: :obj:`None`)
47
+ api_key (Optional[str], optional): The API key for authenticating with
48
+ the gemini service. (default: :obj:`None`)
49
+ url (Optional[str], optional): The url to the gemini service.
50
+ (default: :obj:`None`)
51
+ token_counter (Optional[BaseTokenCounter], optional): Token counter to
52
+ use for the model. If not provided, :obj:`GeminiTokenCounter` will
53
+ be used. (default: :obj:`None`)
54
+
55
+ Notes:
56
+ Currently :obj:`"stream": True` is not supported with Gemini due to the
57
+ limitation of the current camel design.
58
+ """
59
+
60
+ @dependencies_required('google')
41
61
  def __init__(
42
62
  self,
43
- model_type: ModelType,
44
- model_config_dict: Dict[str, Any],
63
+ model_type: Union[ModelType, str],
64
+ model_config_dict: Optional[Dict[str, Any]] = None,
45
65
  api_key: Optional[str] = None,
46
66
  url: Optional[str] = None,
47
67
  token_counter: Optional[BaseTokenCounter] = None,
48
68
  ) -> None:
49
- r"""Constructor for Gemini backend.
50
-
51
- Args:
52
- model_type (ModelType): Model for which a backend is created.
53
- model_config_dict (Dict[str, Any]): A dictionary that will
54
- be fed into generate_content().
55
- api_key (Optional[str]): The API key for authenticating with the
56
- gemini service. (default: :obj:`None`)
57
- url (Optional[str]): The url to the gemini service.
58
- token_counter (Optional[BaseTokenCounter]): Token counter to use
59
- for the model. If not provided, `GeminiTokenCounter` will be
60
- used.
61
- """
62
- import os
63
-
64
69
  import google.generativeai as genai
65
70
  from google.generativeai.types.generation_types import GenerationConfig
66
71
 
72
+ if model_config_dict is None:
73
+ model_config_dict = GeminiConfig().as_dict()
74
+
75
+ api_key = api_key or os.environ.get("GOOGLE_API_KEY")
67
76
  super().__init__(
68
77
  model_type, model_config_dict, api_key, url, token_counter
69
78
  )
70
- self._api_key = api_key or os.environ.get("GOOGLE_API_KEY")
71
79
  genai.configure(api_key=self._api_key)
72
- self._client = genai.GenerativeModel(self.model_type.value)
80
+ self._client = genai.GenerativeModel(self.model_type)
73
81
 
74
82
  keys = list(self.model_config_dict.keys())
75
83
  generation_config_dict = {
@@ -143,8 +151,8 @@ class GeminiModel(BaseModelBackend):
143
151
  return self.model_config_dict.get('stream', False)
144
152
 
145
153
  def to_gemini_req(self, messages: List[OpenAIMessage]) -> 'ContentsType':
146
- r"""Converts the request from the OpenAI API format to
147
- the Gemini API request format.
154
+ r"""Converts the request from the OpenAI API format to the Gemini API
155
+ request format.
148
156
 
149
157
  Args:
150
158
  messages: The request object from the OpenAI API.
@@ -189,7 +197,7 @@ class GeminiModel(BaseModelBackend):
189
197
  id=f"chatcmpl-{uuid.uuid4().hex!s}",
190
198
  object="chat.completion",
191
199
  created=int(time.time()),
192
- model=self.model_type.value,
200
+ model=self.model_type,
193
201
  choices=[],
194
202
  )
195
203
  for i, candidate in enumerate(response.candidates):