camel-ai 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic. Click here for more details.

Files changed (75)
  1. camel/__init__.py +30 -0
  2. camel/agents/__init__.py +40 -0
  3. camel/agents/base.py +29 -0
  4. camel/agents/chat_agent.py +539 -0
  5. camel/agents/critic_agent.py +179 -0
  6. camel/agents/embodied_agent.py +138 -0
  7. camel/agents/role_assignment_agent.py +117 -0
  8. camel/agents/task_agent.py +382 -0
  9. camel/agents/tool_agents/__init__.py +20 -0
  10. camel/agents/tool_agents/base.py +40 -0
  11. camel/agents/tool_agents/hugging_face_tool_agent.py +203 -0
  12. camel/configs.py +159 -0
  13. camel/embeddings/__init__.py +20 -0
  14. camel/embeddings/base.py +65 -0
  15. camel/embeddings/openai_embedding.py +74 -0
  16. camel/functions/__init__.py +27 -0
  17. camel/functions/base_io_functions.py +261 -0
  18. camel/functions/math_functions.py +61 -0
  19. camel/functions/openai_function.py +88 -0
  20. camel/functions/search_functions.py +309 -0
  21. camel/functions/unstructured_io_fuctions.py +616 -0
  22. camel/functions/weather_functions.py +136 -0
  23. camel/generators.py +263 -0
  24. camel/human.py +130 -0
  25. camel/memories/__init__.py +28 -0
  26. camel/memories/base.py +75 -0
  27. camel/memories/chat_history_memory.py +111 -0
  28. camel/memories/context_creators/__init__.py +18 -0
  29. camel/memories/context_creators/base.py +72 -0
  30. camel/memories/context_creators/score_based.py +130 -0
  31. camel/memories/records.py +92 -0
  32. camel/messages/__init__.py +38 -0
  33. camel/messages/base.py +223 -0
  34. camel/messages/func_message.py +106 -0
  35. camel/models/__init__.py +26 -0
  36. camel/models/base_model.py +110 -0
  37. camel/models/model_factory.py +59 -0
  38. camel/models/open_source_model.py +144 -0
  39. camel/models/openai_model.py +103 -0
  40. camel/models/stub_model.py +106 -0
  41. camel/prompts/__init__.py +38 -0
  42. camel/prompts/ai_society.py +121 -0
  43. camel/prompts/base.py +227 -0
  44. camel/prompts/code.py +111 -0
  45. camel/prompts/evaluation.py +40 -0
  46. camel/prompts/misalignment.py +84 -0
  47. camel/prompts/prompt_templates.py +117 -0
  48. camel/prompts/role_description_prompt_template.py +53 -0
  49. camel/prompts/solution_extraction.py +44 -0
  50. camel/prompts/task_prompt_template.py +56 -0
  51. camel/prompts/translation.py +42 -0
  52. camel/responses/__init__.py +18 -0
  53. camel/responses/agent_responses.py +42 -0
  54. camel/societies/__init__.py +20 -0
  55. camel/societies/babyagi_playing.py +254 -0
  56. camel/societies/role_playing.py +456 -0
  57. camel/storages/__init__.py +23 -0
  58. camel/storages/key_value_storages/__init__.py +23 -0
  59. camel/storages/key_value_storages/base.py +57 -0
  60. camel/storages/key_value_storages/in_memory.py +51 -0
  61. camel/storages/key_value_storages/json.py +97 -0
  62. camel/terminators/__init__.py +23 -0
  63. camel/terminators/base.py +44 -0
  64. camel/terminators/response_terminator.py +118 -0
  65. camel/terminators/token_limit_terminator.py +55 -0
  66. camel/types/__init__.py +54 -0
  67. camel/types/enums.py +176 -0
  68. camel/types/openai_types.py +39 -0
  69. camel/utils/__init__.py +47 -0
  70. camel/utils/commons.py +243 -0
  71. camel/utils/python_interpreter.py +435 -0
  72. camel/utils/token_counting.py +220 -0
  73. camel_ai-0.1.1.dist-info/METADATA +311 -0
  74. camel_ai-0.1.1.dist-info/RECORD +75 -0
  75. camel_ai-0.1.1.dist-info/WHEEL +4 -0
@@ -0,0 +1,106 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from dataclasses import dataclass
15
+ from typing import Any, Dict, Optional
16
+
17
+ from camel.messages import (
18
+ BaseMessage,
19
+ OpenAIAssistantMessage,
20
+ OpenAIFunctionMessage,
21
+ OpenAIMessage,
22
+ )
23
+ from camel.types import OpenAIBackendRole
24
+
25
+
26
@dataclass
class FunctionCallingMessage(BaseMessage):
    r"""Class for message objects used specifically for
    function-related messages.

    Args:
        func_name (Optional[str]): The name of the function used.
            (default: :obj:`None`)
        args (Optional[Dict]): The dictionary of arguments passed to the
            function. (default: :obj:`None`)
        result (Optional[Any]): The result of function execution.
            (default: :obj:`None`)
    """
    func_name: Optional[str] = None
    args: Optional[Dict] = None
    result: Optional[Any] = None

    def to_openai_message(
        self,
        role_at_backend: OpenAIBackendRole,
    ) -> OpenAIMessage:
        r"""Converts the message to an :obj:`OpenAIMessage` object.

        Args:
            role_at_backend (OpenAIBackendRole): The role of the message in
                OpenAI chat system.

        Returns:
            OpenAIMessage: The converted :obj:`OpenAIMessage` object.

        Raises:
            ValueError: If :obj:`role_at_backend` is neither
                :obj:`ASSISTANT` nor :obj:`FUNCTION`.
        """
        if role_at_backend == OpenAIBackendRole.ASSISTANT:
            return self.to_openai_assistant_message()
        elif role_at_backend == OpenAIBackendRole.FUNCTION:
            return self.to_openai_function_message()
        else:
            raise ValueError(f"Unsupported role: {role_at_backend}.")

    def to_openai_assistant_message(self) -> OpenAIAssistantMessage:
        r"""Converts the message to an :obj:`OpenAIAssistantMessage` object.

        Returns:
            OpenAIAssistantMessage: The converted :obj:`OpenAIAssistantMessage`
                object.

        Raises:
            ValueError: If the function name or the arguments are missing.
        """
        # Use explicit `is None` checks rather than truthiness so that an
        # empty argument dict (a zero-argument function call) is accepted.
        if self.func_name is None or self.args is None:
            raise ValueError(
                "Invalid request for converting into assistant message"
                " due to missing function name or arguments.")

        msg_dict: OpenAIAssistantMessage = {
            "role": "assistant",
            "content": self.content,
            "function_call": {
                "name": self.func_name,
                "arguments": str(self.args),
            }
        }

        return msg_dict

    def to_openai_function_message(self) -> OpenAIFunctionMessage:
        r"""Converts the message to an :obj:`OpenAIMessage` object
        with the role being "function".

        Returns:
            OpenAIMessage: The converted :obj:`OpenAIMessage` object
                with its role being "function".

        Raises:
            ValueError: If the function name or the result is missing.
        """
        # `is None` (not truthiness) so that falsy results such as 0,
        # False, or "" are still treated as valid execution results.
        if self.func_name is None or self.result is None:
            raise ValueError(
                "Invalid request for converting into function message"
                " due to missing function name or results.")

        # Bug fix: the original wrapped the result in a set literal
        # (`{str(self.result)}`), serializing as "{'result': {'...'}}".
        result_content = {"result": str(self.result)}
        msg_dict: OpenAIFunctionMessage = {
            "role": "function",
            "name": self.func_name,
            "content": f'{result_content}',
        }

        return msg_dict
@@ -0,0 +1,26 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
# Import order is load-bearing: `base_model` must be imported first because
# the concrete backends below subclass `BaseModelBackend`, and
# `model_factory` last because it imports the concrete backend classes
# from this package.
from .base_model import BaseModelBackend
from .openai_model import OpenAIModel
from .stub_model import StubModel
from .open_source_model import OpenSourceModel
from .model_factory import ModelFactory

# Public API of the `camel.models` package.
__all__ = [
    'BaseModelBackend',
    'OpenAIModel',
    'StubModel',
    'OpenSourceModel',
    'ModelFactory',
]
@@ -0,0 +1,110 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from abc import ABC, abstractmethod
15
+ from typing import Any, Dict, List, Union
16
+
17
+ from openai import Stream
18
+
19
+ from camel.messages import OpenAIMessage
20
+ from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
21
+ from camel.utils import BaseTokenCounter
22
+
23
+
24
class BaseModelBackend(ABC):
    r"""Abstract interface shared by all model backends: the OpenAI API,
    a locally served open-source LLM, a stub used in unit tests, etc.
    """

    def __init__(self, model_type: ModelType,
                 model_config_dict: Dict[str, Any]) -> None:
        r"""Creates a backend and validates its configuration.

        Args:
            model_type (ModelType): Model for which a backend is created.
            model_config_dict (Dict[str, Any]): A config dictionary.
        """
        self.model_type = model_type
        self.model_config_dict = model_config_dict
        # Fail fast on configuration errors before any request is made.
        self.check_model_config()

    @property
    @abstractmethod
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            BaseTokenCounter: The token counter following the model's
                tokenization style.
        """
        pass

    @abstractmethod
    def run(
        self,
        messages: List[OpenAIMessage],
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Runs the query to the backend model.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat
                history in OpenAI API format.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                `ChatCompletion` in the non-stream mode, or
                `Stream[ChatCompletionChunk]` in the stream mode.
        """
        pass

    @abstractmethod
    def check_model_config(self):
        r"""Check whether the input model configuration contains unexpected
        arguments.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected argument for this model class.
        """
        pass

    def count_tokens_from_messages(self, messages: List[OpenAIMessage]) -> int:
        r"""Count the number of tokens in the messages using the
        model-specific tokenizer.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat
                history in OpenAI API format.

        Returns:
            int: Number of tokens in the messages.
        """
        counter = self.token_counter
        return counter.count_tokens_from_messages(messages)

    @property
    def token_limit(self) -> int:
        r"""Returns the maximum token limit for a given model.

        Returns:
            int: The maximum token limit for the given model.
        """
        return self.model_type.token_limit

    @property
    def stream(self) -> bool:
        r"""Returns whether the model is in stream mode, which sends
        partial results each time. Subclasses override this to read
        their configuration.

        Returns:
            bool: Whether the model is in stream mode.
        """
        return False
@@ -0,0 +1,59 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from typing import Any, Dict
15
+
16
+ from camel.models import (
17
+ BaseModelBackend,
18
+ OpenAIModel,
19
+ OpenSourceModel,
20
+ StubModel,
21
+ )
22
+ from camel.types import ModelType
23
+
24
+
25
class ModelFactory:
    r"""Factory of backend models.

    Raises:
        ValueError: in case the provided model type is unknown.
    """

    @staticmethod
    def create(model_type: ModelType,
               model_config_dict: Dict) -> BaseModelBackend:
        r"""Creates an instance of `BaseModelBackend` of the specified type.

        Args:
            model_type (ModelType): Model for which a backend is created.
            model_config_dict (Dict): A dictionary that will be fed into
                the backend constructor.

        Raises:
            ValueError: If there is not backend for the model.

        Returns:
            BaseModelBackend: The initialized backend.
        """
        # Guard-clause dispatch: return as soon as a matching backend
        # class is identified.
        if model_type.is_openai:
            return OpenAIModel(model_type, model_config_dict)
        if model_type == ModelType.STUB:
            return StubModel(model_type, model_config_dict)
        if model_type.is_open_source:
            return OpenSourceModel(model_type, model_config_dict)
        raise ValueError(f"Unknown model type `{model_type}` is input")
@@ -0,0 +1,144 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from typing import Any, Dict, List, Optional, Union
15
+
16
+ from openai import OpenAI, Stream
17
+
18
+ from camel.configs import OPENAI_API_PARAMS
19
+ from camel.messages import OpenAIMessage
20
+ from camel.models import BaseModelBackend
21
+ from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
22
+ from camel.utils import BaseTokenCounter, OpenSourceTokenCounter
23
+
24
+
25
class OpenSourceModel(BaseModelBackend):
    r"""Class for interface with OpenAI-API-compatible servers running
    open-source models.
    """

    def __init__(
        self,
        model_type: ModelType,
        model_config_dict: Dict[str, Any],
    ) -> None:
        r"""Constructor for model backends of Open-source models.

        Args:
            model_type (ModelType): Model for which a backend is created.
            model_config_dict (Dict[str, Any]): A dictionary containing
                :obj:`model_path`, :obj:`server_url`, and an
                :obj:`api_params` object whose attributes are fed into
                :obj:`openai.ChatCompletion.create()`.

        Raises:
            ValueError: If the model type is not open-source, the model
                path or server URL is missing, or the model name does not
                match the model type.
        """
        # NOTE: `super().__init__` invokes `check_model_config()` below,
        # which validates the *full* config dict (model_path, server_url,
        # api_params) before it is replaced at the end of this method.
        super().__init__(model_type, model_config_dict)
        self._token_counter: Optional[BaseTokenCounter] = None

        # Check whether the input model type is open-source
        if not model_type.is_open_source:
            raise ValueError(
                f"Model `{model_type}` is not a supported open-source model.")

        # Check whether input model path is empty
        model_path: Optional[str] = (self.model_config_dict.get(
            "model_path", None))
        if not model_path:
            raise ValueError("Path to open-source model is not provided.")
        self.model_path: str = model_path

        # Check whether the model name matches the model type.
        # The model name is taken as the last component of the path
        # (HuggingFace-style `org/model` identifiers).
        self.model_name: str = self.model_path.split('/')[-1]
        if not self.model_type.validate_model_name(self.model_name):
            raise ValueError(
                f"Model name `{self.model_name}` does not match model type "
                f"`{self.model_type.value}`.")

        # Load the server URL and check whether it is None
        server_url: Optional[str] = (self.model_config_dict.get(
            "server_url", None))
        if not server_url:
            raise ValueError(
                "URL to server running open-source LLM is not provided.")
        self.server_url: str = server_url
        self._client = OpenAI(
            base_url=self.server_url,
            timeout=60,
            max_retries=3,
        )

        # Replace `model_config_dict` with only the params to be
        # passed to OpenAI API
        self.model_config_dict = self.model_config_dict["api_params"].__dict__

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            BaseTokenCounter: The token counter following the model's
                tokenization style.
        """
        # Lazily created on first access and cached afterwards.
        if not self._token_counter:
            self._token_counter = OpenSourceTokenCounter(
                self.model_type, self.model_path)
        return self._token_counter

    def run(
        self,
        messages: List[OpenAIMessage],
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Runs inference of OpenAI-API-style chat completion.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat history
                in OpenAI API format.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                `ChatCompletion` in the non-stream mode, or
                `Stream[ChatCompletionChunk]` in the stream mode.
        """
        messages_openai: List[OpenAIMessage] = messages
        # By this point `model_config_dict` holds only the api_params
        # (see `__init__`), so it can be splatted into the API call.
        response = self._client.chat.completions.create(
            messages=messages_openai,
            model=self.model_name,
            **self.model_config_dict,
        )
        return response

    def check_model_config(self):
        r"""Check whether the model configuration is valid for open-source
        model backends.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected arguments to OpenAI API, or it does not contain
                :obj:`model_path` or :obj:`server_url`.
        """
        # Called from `BaseModelBackend.__init__`, i.e. while
        # `model_config_dict` is still the full (unreplaced) config.
        if ("model_path" not in self.model_config_dict
                or "server_url" not in self.model_config_dict):
            raise ValueError(
                "Invalid configuration for open-source model backend with "
                ":obj:`model_path` or :obj:`server_url` missing.")

        for param in self.model_config_dict["api_params"].__dict__:
            if param not in OPENAI_API_PARAMS:
                raise ValueError(f"Unexpected argument `{param}` is "
                                 "input into open-source model backend.")

    @property
    def stream(self) -> bool:
        r"""Returns whether the model is in stream mode,
        which sends partial results each time.

        Returns:
            bool: Whether the model is in stream mode.
        """
        return self.model_config_dict.get('stream', False)
@@ -0,0 +1,103 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ import os
15
+ from typing import Any, Dict, List, Optional, Union
16
+
17
+ from openai import OpenAI, Stream
18
+
19
+ from camel.configs import OPENAI_API_PARAMS_WITH_FUNCTIONS
20
+ from camel.messages import OpenAIMessage
21
+ from camel.models import BaseModelBackend
22
+ from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
23
+ from camel.utils import (
24
+ BaseTokenCounter,
25
+ OpenAITokenCounter,
26
+ openai_api_key_required,
27
+ )
28
+
29
+
30
class OpenAIModel(BaseModelBackend):
    r"""OpenAI API in a unified BaseModelBackend interface."""

    def __init__(self, model_type: ModelType,
                 model_config_dict: Dict[str, Any]) -> None:
        r"""Constructor for OpenAI backend.

        Args:
            model_type (ModelType): Model for which a backend is created,
                one of GPT_* series.
            model_config_dict (Dict[str, Any]): A dictionary that will
                be fed into openai.ChatCompletion.create().
        """
        super().__init__(model_type, model_config_dict)
        # Allow overriding the API endpoint (e.g. a proxy) via env var;
        # `None` falls back to the SDK default.
        base_url = os.environ.get('OPENAI_API_BASE_URL', None)
        self._client = OpenAI(timeout=60, max_retries=3, base_url=base_url)
        self._token_counter: Optional[BaseTokenCounter] = None

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            BaseTokenCounter: The token counter following the model's
                tokenization style.
        """
        # Lazily constructed and cached on first access.
        if not self._token_counter:
            self._token_counter = OpenAITokenCounter(self.model_type)
        return self._token_counter

    @openai_api_key_required
    def run(
        self,
        messages: List[OpenAIMessage],
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Runs inference of OpenAI chat completion.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat
                history in OpenAI API format.

        Returns:
            Union[ChatCompletion, Stream[ChatCompletionChunk]]:
                `ChatCompletion` in the non-stream mode, or
                `Stream[ChatCompletionChunk]` in the stream mode.
        """
        return self._client.chat.completions.create(
            model=self.model_type.value,
            messages=messages,
            **self.model_config_dict,
        )

    def check_model_config(self):
        r"""Check whether the model configuration contains any
        unexpected arguments to OpenAI API.

        Raises:
            ValueError: If the model configuration dictionary contains any
                unexpected arguments to OpenAI API.
        """
        unexpected = [
            param for param in self.model_config_dict
            if param not in OPENAI_API_PARAMS_WITH_FUNCTIONS
        ]
        if unexpected:
            raise ValueError(f"Unexpected argument `{unexpected[0]}` is "
                             "input into OpenAI model backend.")

    @property
    def stream(self) -> bool:
        r"""Returns whether the model is in stream mode, which sends
        partial results each time.

        Returns:
            bool: Whether the model is in stream mode.
        """
        return self.model_config_dict.get('stream', False)
@@ -0,0 +1,106 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ import time
15
+ from typing import Any, Dict, List, Optional, Union
16
+
17
+ from openai import Stream
18
+
19
+ from camel.messages import OpenAIMessage
20
+ from camel.models import BaseModelBackend
21
+ from camel.types import (
22
+ ChatCompletion,
23
+ ChatCompletionChunk,
24
+ ChatCompletionMessage,
25
+ Choice,
26
+ CompletionUsage,
27
+ ModelType,
28
+ )
29
+ from camel.utils import BaseTokenCounter
30
+
31
+
32
class StubTokenCounter(BaseTokenCounter):
    r"""Token counter for the STUB model backend; used in unit tests."""

    def count_tokens_from_messages(self, messages: List[OpenAIMessage]) -> int:
        r"""Token counting for STUB models, directly returning a constant.

        Args:
            messages (List[OpenAIMessage]): Message list with the chat
                history in OpenAI API format (ignored).

        Returns:
            int: A constant to act as the number of the tokens in the
                messages.
        """
        # The stub performs no real tokenization; any constant suffices.
        return 10
46
+
47
+
48
class StubModel(BaseModelBackend):
    r"""A dummy model used for unit tests."""
    model_type = ModelType.STUB

    def __init__(self, model_type: ModelType,
                 model_config_dict: Dict[str, Any]) -> None:
        r"""All arguments are unused for the dummy model."""
        super().__init__(model_type, model_config_dict)
        self._token_counter: Optional[BaseTokenCounter] = None

    @property
    def token_counter(self) -> BaseTokenCounter:
        r"""Initialize the token counter for the model backend.

        Returns:
            BaseTokenCounter: The token counter following the model's
                tokenization style.
        """
        if self._token_counter is None:
            self._token_counter = StubTokenCounter()
        return self._token_counter

    def run(
        self, messages: List[OpenAIMessage]
    ) -> Union[ChatCompletion, Stream[ChatCompletionChunk]]:
        r"""Run fake inference by returning a fixed string.
        All arguments are unused for the dummy model.

        Returns:
            Dict[str, Any]: Response in the OpenAI API format.
        """
        ARBITRARY_STRING = "Lorem Ipsum"
        # Build the canned response bottom-up: message -> choice -> completion.
        stub_message = ChatCompletionMessage(
            content=ARBITRARY_STRING,
            role="assistant",
        )
        stub_choice = Choice(
            finish_reason="stop",
            index=0,
            message=stub_message,
        )
        stub_usage = CompletionUsage(
            completion_tokens=10,
            prompt_tokens=10,
            total_tokens=20,
        )
        return ChatCompletion(
            id="stub_model_id",
            model="stub",
            object="chat.completion",
            created=int(time.time()),
            choices=[stub_choice],
            usage=stub_usage,
        )

    def check_model_config(self):
        r"""Directly pass the check on arguments to STUB model."""
        # The stub accepts any configuration.
        pass