camel-ai 0.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of camel-ai might be problematic. Click here for more details.

Files changed (75) hide show
  1. camel/__init__.py +30 -0
  2. camel/agents/__init__.py +40 -0
  3. camel/agents/base.py +29 -0
  4. camel/agents/chat_agent.py +539 -0
  5. camel/agents/critic_agent.py +179 -0
  6. camel/agents/embodied_agent.py +138 -0
  7. camel/agents/role_assignment_agent.py +117 -0
  8. camel/agents/task_agent.py +382 -0
  9. camel/agents/tool_agents/__init__.py +20 -0
  10. camel/agents/tool_agents/base.py +40 -0
  11. camel/agents/tool_agents/hugging_face_tool_agent.py +203 -0
  12. camel/configs.py +159 -0
  13. camel/embeddings/__init__.py +20 -0
  14. camel/embeddings/base.py +65 -0
  15. camel/embeddings/openai_embedding.py +74 -0
  16. camel/functions/__init__.py +27 -0
  17. camel/functions/base_io_functions.py +261 -0
  18. camel/functions/math_functions.py +61 -0
  19. camel/functions/openai_function.py +88 -0
  20. camel/functions/search_functions.py +309 -0
  21. camel/functions/unstructured_io_fuctions.py +616 -0
  22. camel/functions/weather_functions.py +136 -0
  23. camel/generators.py +263 -0
  24. camel/human.py +130 -0
  25. camel/memories/__init__.py +28 -0
  26. camel/memories/base.py +75 -0
  27. camel/memories/chat_history_memory.py +111 -0
  28. camel/memories/context_creators/__init__.py +18 -0
  29. camel/memories/context_creators/base.py +72 -0
  30. camel/memories/context_creators/score_based.py +130 -0
  31. camel/memories/records.py +92 -0
  32. camel/messages/__init__.py +38 -0
  33. camel/messages/base.py +223 -0
  34. camel/messages/func_message.py +106 -0
  35. camel/models/__init__.py +26 -0
  36. camel/models/base_model.py +110 -0
  37. camel/models/model_factory.py +59 -0
  38. camel/models/open_source_model.py +144 -0
  39. camel/models/openai_model.py +103 -0
  40. camel/models/stub_model.py +106 -0
  41. camel/prompts/__init__.py +38 -0
  42. camel/prompts/ai_society.py +121 -0
  43. camel/prompts/base.py +227 -0
  44. camel/prompts/code.py +111 -0
  45. camel/prompts/evaluation.py +40 -0
  46. camel/prompts/misalignment.py +84 -0
  47. camel/prompts/prompt_templates.py +117 -0
  48. camel/prompts/role_description_prompt_template.py +53 -0
  49. camel/prompts/solution_extraction.py +44 -0
  50. camel/prompts/task_prompt_template.py +56 -0
  51. camel/prompts/translation.py +42 -0
  52. camel/responses/__init__.py +18 -0
  53. camel/responses/agent_responses.py +42 -0
  54. camel/societies/__init__.py +20 -0
  55. camel/societies/babyagi_playing.py +254 -0
  56. camel/societies/role_playing.py +456 -0
  57. camel/storages/__init__.py +23 -0
  58. camel/storages/key_value_storages/__init__.py +23 -0
  59. camel/storages/key_value_storages/base.py +57 -0
  60. camel/storages/key_value_storages/in_memory.py +51 -0
  61. camel/storages/key_value_storages/json.py +97 -0
  62. camel/terminators/__init__.py +23 -0
  63. camel/terminators/base.py +44 -0
  64. camel/terminators/response_terminator.py +118 -0
  65. camel/terminators/token_limit_terminator.py +55 -0
  66. camel/types/__init__.py +54 -0
  67. camel/types/enums.py +176 -0
  68. camel/types/openai_types.py +39 -0
  69. camel/utils/__init__.py +47 -0
  70. camel/utils/commons.py +243 -0
  71. camel/utils/python_interpreter.py +435 -0
  72. camel/utils/token_counting.py +220 -0
  73. camel_ai-0.1.1.dist-info/METADATA +311 -0
  74. camel_ai-0.1.1.dist-info/RECORD +75 -0
  75. camel_ai-0.1.1.dist-info/WHEEL +4 -0
@@ -0,0 +1,111 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from typing import List, Optional, Tuple
15
+
16
+ from camel.memories import BaseMemory, ContextRecord, MemoryRecord
17
+ from camel.memories.context_creators import BaseContextCreator
18
+ from camel.messages import OpenAIMessage
19
+ from camel.storages import BaseKeyValueStorage, InMemoryKeyValueStorage
20
+ from camel.types import OpenAIBackendRole
21
+
22
+
23
class ChatHistoryMemory(BaseMemory):
    r"""An implementation of the :obj:`BaseMemory` abstract base class for
    maintaining a record of chat histories.

    This memory class helps manage conversation histories with a designated
    storage mechanism, either provided by the user or using a default
    in-memory storage. It offers a windowed approach to retrieving chat
    histories, allowing users to specify how many recent messages they'd
    like to fetch.

    `ChatHistoryMemory` requires messages to be stored with certain
    metadata (e.g., `role_at_backend`) to maintain consistency and validate
    the chat history.

    Args:
        context_creator (BaseContextCreator): A context creator containing
            the context limit and the message pruning strategy.
        storage (BaseKeyValueStorage, optional): A storage mechanism for
            storing chat history. If `None`, an :obj:`InMemoryKeyValueStorage`
            will be used. (default: :obj:`None`)
        window_size (int, optional): Specifies the number of recent chat
            messages to retrieve. If not provided, the entire chat history
            will be retrieved. (default: :obj:`None`)

    Raises:
        ValueError: If `window_size` is given but is not a positive integer.
    """

    def __init__(
        self,
        context_creator: BaseContextCreator,
        storage: Optional[BaseKeyValueStorage] = None,
        window_size: Optional[int] = None,
    ) -> None:
        # Reject non-positive window sizes early: a negative value would
        # silently flip the slice in `get_context` (e.g. `records[3:]`
        # instead of `records[-3:]`) and drop the newest messages.
        if window_size is not None and (not isinstance(window_size, int)
                                        or window_size < 1):
            raise ValueError("`window_size` must be a positive integer.")
        self.context_creator = context_creator
        self.storage = storage or InMemoryKeyValueStorage()
        self.window_size = window_size

    def get_context(self) -> Tuple[List[OpenAIMessage], int]:
        r"""Gets chat context with a proper size for the agent from the memory
        based on the window size or fetches the entire chat history if no
        window size is specified.

        Returns:
            (List[OpenAIMessage], int): A tuple containing the constructed
                context in OpenAIMessage format and the total token count.

        Raises:
            ValueError: If the memory is empty.
        """
        record_dicts = self.storage.load()
        if len(record_dicts) == 0:
            raise ValueError("The `ChatHistoryMemory` is empty.")

        # Only the most recent `window_size` records are considered; the
        # full history is used when no window size was configured.
        if self.window_size is not None:
            record_dicts = record_dicts[-self.window_size:]
        chat_records: List[MemoryRecord] = [
            MemoryRecord.from_dict(record_dict) for record_dict in record_dicts
        ]

        # We assume that, in the chat history memory, the closer the record
        # is to the current message, the higher its score should be.
        output_records = []
        score = 1.0
        for record in reversed(chat_records):
            if record.role_at_backend == OpenAIBackendRole.SYSTEM:
                # System messages are always kept.
                output_records.append(ContextRecord(record, 1.0))
            else:
                # Other messages' score drops down gradually
                score *= 0.99
                output_records.append(ContextRecord(record, score))

        output_records.reverse()
        return self.context_creator.create_context(output_records)

    def write_records(self, records: List[MemoryRecord]) -> None:
        r"""Writes memory records to the memory. Additionally, performs
        validation checks on the messages.

        Args:
            records (List[MemoryRecord]): Memory records to be added to the
                memory.
        """
        self.storage.save([record.to_dict() for record in records])

    def clear(self) -> None:
        r"""Clears all chat messages from the memory."""
        self.storage.clear()
@@ -0,0 +1,18 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+
15
+ from .base import BaseContextCreator
16
+ from .score_based import ScoreBasedContextCreator
17
+
18
+ __all__ = ['BaseContextCreator', 'ScoreBasedContextCreator']
@@ -0,0 +1,72 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from abc import ABC, abstractmethod
15
+ from typing import List, Tuple
16
+
17
+ from camel.memories import ContextRecord
18
+ from camel.messages import OpenAIMessage
19
+ from camel.utils import BaseTokenCounter
20
+
21
+
22
class BaseContextCreator(ABC):
    r"""An abstract base class defining the interface for context creation
    strategies.

    Concrete subclasses turn a list of context records into a conversational
    context whose total token count stays within a configured limit. Each
    subclass supplies its own pruning logic by implementing the
    :obj:`token_counter`, :obj:`token_limit`, and :obj:`create_context`
    members.

    Attributes:
        token_counter (BaseTokenCounter): A token counter instance responsible
            for counting tokens in a message.
        token_limit (int): The maximum number of tokens allowed in the
            generated context.
    """

    @property
    @abstractmethod
    def token_counter(self) -> BaseTokenCounter:
        ...

    @property
    @abstractmethod
    def token_limit(self) -> int:
        ...

    @abstractmethod
    def create_context(
        self,
        records: List[ContextRecord],
    ) -> Tuple[List[OpenAIMessage], int]:
        r"""An abstract method to create conversational context from the chat
        history.

        Builds the context from the provided records. How records are
        selected and how the token budget is enforced is left entirely to
        the subclass; the output messages must keep the same order as the
        input records.

        Args:
            records (List[ContextRecord]): A list of context records from
                which to generate the context.

        Returns:
            Tuple[List[OpenAIMessage], int]: A tuple containing the
                constructed context in OpenAIMessage format and the total
                token count.
        """
        ...
@@ -0,0 +1,130 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from dataclasses import dataclass
15
+ from typing import List, Tuple
16
+
17
+ from camel.memories import ContextRecord
18
+ from camel.memories.context_creators import BaseContextCreator
19
+ from camel.messages import OpenAIMessage
20
+ from camel.utils import BaseTokenCounter
21
+
22
+
23
@dataclass(frozen=True)
class _ContextUnit:
    r"""Internal bookkeeping record used by :obj:`ScoreBasedContextCreator`.

    Pairs a context record with its original position and its token cost so
    the creator can prune by score and later restore the input order.
    """
    # Position of the record in the original input list.
    idx: int
    # The wrapped context record (message plus its score).
    record: ContextRecord
    # Number of tokens of the record's message, as counted by the creator.
    num_tokens: int
28
+
29
+
30
class ScoreBasedContextCreator(BaseContextCreator):
    r"""A default implementation of context creation strategy, which inherits
    from :obj:`BaseContextCreator`.

    Generates a conversational context from a list of chat history records
    while keeping the total token count of the context within a specified
    limit. When the limit is exceeded, the lowest-score messages are pruned
    first.

    Args:
        token_counter (BaseTokenCounter): An instance responsible for counting
            tokens in a message.
        token_limit (int): The maximum number of tokens allowed in the
            generated context.
    """

    def __init__(self, token_counter: BaseTokenCounter,
                 token_limit: int) -> None:
        self._token_counter = token_counter
        self._token_limit = token_limit

    @property
    def token_counter(self) -> BaseTokenCounter:
        return self._token_counter

    @property
    def token_limit(self) -> int:
        return self._token_limit

    def create_context(
        self,
        records: List[ContextRecord],
    ) -> Tuple[List[OpenAIMessage], int]:
        r"""Creates conversational context from chat history while respecting
        token limits.

        Counts the tokens of every record, and if the total exceeds the
        configured limit, drops the lowest-score records until the remainder
        fits. Records with a score of 1 are never dropped.

        Args:
            records (List[ContextRecord]): A list of message records from
                which to generate the context.

        Returns:
            Tuple[List[OpenAIMessage], int]: A tuple containing the
                constructed context in OpenAIMessage format and the total
                token count.

        Raises:
            RuntimeError: If it's impossible to create a valid context without
                exceeding the token limit.
        """
        # TODO: optimize the process, may give information back to memory
        units = []
        for position, record in enumerate(records):
            message = record.memory_record.to_openai_message()
            cost = self.token_counter.count_tokens_from_messages([message])
            units.append(_ContextUnit(position, record, cost))

        # Everything fits within the budget: return as-is.
        remaining_tokens = sum(unit.num_tokens for unit in units)
        if remaining_tokens <= self.token_limit:
            return self._create_output(units)

        # Stable sort keeps input order for equal scores, so among ties the
        # earlier records are pruned first.
        units.sort(key=lambda unit: unit.record.score)

        # Drop lowest-score units until the remaining total fits the limit.
        kept_from = None
        for drop_count, unit in enumerate(units):
            if unit.record.score == 1:
                # Score-1 records must be kept; if we reach one while still
                # over budget, no valid context exists.
                raise RuntimeError(
                    "Cannot create context: exceed token limit.",
                    remaining_tokens)
            remaining_tokens -= unit.num_tokens
            if remaining_tokens <= self.token_limit:
                kept_from = drop_count
                break
        if kept_from is None:
            raise RuntimeError("Cannot create context: exceed token limit.",
                               remaining_tokens)
        return self._create_output(units[kept_from + 1:])

    def _create_output(
        self, context_units: List[_ContextUnit]
    ) -> Tuple[List[OpenAIMessage], int]:
        r"""Helper method to generate output from context units.

        Restores the original input order, then converts the units into a
        list of OpenAIMessages together with their total token count.
        """
        ordered = sorted(context_units, key=lambda unit: unit.idx)
        messages = [
            unit.record.memory_record.to_openai_message() for unit in ordered
        ]
        return messages, sum(unit.num_tokens for unit in ordered)
@@ -0,0 +1,92 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+
15
+ from dataclasses import asdict, dataclass, field
16
+ from typing import Any, ClassVar, Dict
17
+ from uuid import UUID, uuid4
18
+
19
+ from camel.messages import BaseMessage, FunctionCallingMessage, OpenAIMessage
20
+ from camel.types import OpenAIBackendRole
21
+
22
+
23
@dataclass(frozen=True)
class MemoryRecord:
    r"""The basic message storing unit in the CAMEL memory system.

    Attributes:
        message (BaseMessage): The main content of the record.
        role_at_backend (OpenAIBackendRole): An enumeration value representing
            the role this message played at the OpenAI backend. Note that this
            value is different from the :obj:`RoleType` used in the CAMEL role
            playing system.
        uuid (UUID, optional): A universally unique identifier for this
            record, used to identify it in the memory system. A random UUID
            is assigned when none is given.
        extra_info (Dict[str, str], optional): Additional key-value pairs
            providing more information. Defaults to an empty `Dict`.
    """
    message: BaseMessage
    role_at_backend: OpenAIBackendRole
    uuid: UUID = field(default_factory=uuid4)
    extra_info: Dict[str, str] = field(default_factory=dict)

    _MESSAGE_TYPES: ClassVar[dict] = {
        "BaseMessage": BaseMessage,
        "FunctionCallingMessage": FunctionCallingMessage
    }

    @classmethod
    def from_dict(cls, record_dict: Dict[str, Any]) -> "MemoryRecord":
        r"""Reconstruct a :obj:`MemoryRecord` from the input dict.

        Args:
            record_dict(Dict[str, Any]): A dict generated by :meth:`to_dict`.
        """
        # Copy before popping so the caller's dict is left untouched.
        message_kwargs: Dict = dict(record_dict["message"])
        class_name = message_kwargs.pop("__class__")
        message_cls = cls._MESSAGE_TYPES[class_name]
        return cls(
            uuid=UUID(record_dict["uuid"]),
            message=message_cls(**message_kwargs),
            role_at_backend=record_dict["role_at_backend"],
            extra_info=record_dict["extra_info"],
        )

    def to_dict(self) -> Dict[str, Any]:
        r"""Convert the :obj:`MemoryRecord` to a dict for serialization
        purposes.
        """
        message_dict: Dict[str, Any] = {
            "__class__": type(self.message).__name__
        }
        message_dict.update(asdict(self.message))
        return {
            "uuid": str(self.uuid),
            "message": message_dict,
            # NOTE(review): the enum object itself (not its value) is stored
            # here, and `from_dict` expects it back unchanged — a plain
            # `json.dumps` of this dict would fail without a custom encoder.
            # Confirm against the storage backends.
            "role_at_backend": self.role_at_backend,
            "extra_info": self.extra_info,
        }

    def to_openai_message(self) -> OpenAIMessage:
        r"""Converts the record to an :obj:`OpenAIMessage` object."""
        return self.message.to_openai_message(self.role_at_backend)
85
+
86
+
87
@dataclass(frozen=True)
class ContextRecord:
    r"""The result of memory retrieving: a memory record together with the
    score assigned to it by the retrieval strategy.
    """
    # The retrieved memory record.
    memory_record: MemoryRecord
    # Relevance score; context creators may prune low-score records first.
    score: float
@@ -0,0 +1,38 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
from camel.types import (
    ChatCompletionSystemMessageParam,
    ChatCompletionAssistantMessageParam,
    ChatCompletionUserMessageParam,
    ChatCompletionFunctionMessageParam,
    ChatCompletionMessageParam,
)

# Short aliases for the OpenAI message param types used throughout CAMEL.
OpenAISystemMessage = ChatCompletionSystemMessageParam
OpenAIAssistantMessage = ChatCompletionAssistantMessageParam
OpenAIUserMessage = ChatCompletionUserMessageParam
OpenAIFunctionMessage = ChatCompletionFunctionMessageParam
OpenAIMessage = ChatCompletionMessageParam

# Imported after the aliases above because `base`/`func_message` depend on
# them (hence the E402 suppression).
from .base import BaseMessage  # noqa: E402
from .func_message import FunctionCallingMessage  # noqa: E402

__all__ = [
    'OpenAISystemMessage',
    'OpenAIAssistantMessage',
    'OpenAIUserMessage',
    # Previously missing although defined above; added for consistency with
    # the other aliases.
    'OpenAIFunctionMessage',
    'OpenAIMessage',
    'BaseMessage',
    'FunctionCallingMessage',
]
camel/messages/base.py ADDED
@@ -0,0 +1,223 @@
1
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
2
+ # Licensed under the Apache License, Version 2.0 (the “License”);
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an “AS IS” BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
14
+ from dataclasses import dataclass
15
+ from typing import Any, Dict, List, Optional, Tuple, Union
16
+
17
+ from camel.messages import (
18
+ OpenAIAssistantMessage,
19
+ OpenAIMessage,
20
+ OpenAISystemMessage,
21
+ OpenAIUserMessage,
22
+ )
23
+ from camel.prompts import CodePrompt, TextPrompt
24
+ from camel.types import OpenAIBackendRole, RoleType
25
+
26
+
27
@dataclass
class BaseMessage:
    r"""Base class for message objects used in CAMEL chat system.

    Args:
        role_name (str): The name of the user or assistant role.
        role_type (RoleType): The type of role, either
            :obj:`RoleType.ASSISTANT` or :obj:`RoleType.USER`.
        meta_dict (Optional[Dict[str, str]]): Additional metadata dictionary
            for the message.
        content (str): The content of the message.
    """
    role_name: str
    role_type: RoleType
    meta_dict: Optional[Dict[str, str]]
    content: str

    @classmethod
    def make_user_message(
            cls, role_name: str, content: str,
            meta_dict: Optional[Dict[str, str]] = None) -> 'BaseMessage':
        r"""Create a message with :obj:`RoleType.USER`.

        Args:
            role_name (str): The name of the user role.
            content (str): The content of the message.
            meta_dict (Optional[Dict[str, str]]): Additional metadata
                dictionary for the message. (default: :obj:`None`)

        Returns:
            BaseMessage: The new user message.
        """
        return cls(role_name, RoleType.USER, meta_dict, content)

    @classmethod
    def make_assistant_message(
            cls, role_name: str, content: str,
            meta_dict: Optional[Dict[str, str]] = None) -> 'BaseMessage':
        r"""Create a message with :obj:`RoleType.ASSISTANT`.

        Args:
            role_name (str): The name of the assistant role.
            content (str): The content of the message.
            meta_dict (Optional[Dict[str, str]]): Additional metadata
                dictionary for the message. (default: :obj:`None`)

        Returns:
            BaseMessage: The new assistant message.
        """
        return cls(role_name, RoleType.ASSISTANT, meta_dict, content)

    def create_new_instance(self, content: str) -> "BaseMessage":
        r"""Create a new instance of the :obj:`BaseMessage` with updated
        content.

        Args:
            content (str): The new content value.

        Returns:
            BaseMessage: The new instance of :obj:`BaseMessage`.
        """
        return self.__class__(role_name=self.role_name,
                              role_type=self.role_type,
                              meta_dict=self.meta_dict, content=content)

    def __add__(self, other: Any) -> Union["BaseMessage", Any]:
        r"""Addition operator override for :obj:`BaseMessage`.

        Args:
            other (Any): The value to be added with.

        Returns:
            Union[BaseMessage, Any]: The result of the addition.
        """
        if isinstance(other, BaseMessage):
            combined_content = self.content.__add__(other.content)
        elif isinstance(other, str):
            combined_content = self.content.__add__(other)
        else:
            raise TypeError(
                f"Unsupported operand type(s) for +: '{type(self)}' and "
                f"'{type(other)}'")
        return self.create_new_instance(combined_content)

    def __mul__(self, other: Any) -> Union["BaseMessage", Any]:
        r"""Multiplication operator override for :obj:`BaseMessage`.

        Args:
            other (Any): The value to be multiplied with.

        Returns:
            Union[BaseMessage, Any]: The result of the multiplication.
        """
        if isinstance(other, int):
            multiplied_content = self.content.__mul__(other)
            return self.create_new_instance(multiplied_content)
        else:
            raise TypeError(
                f"Unsupported operand type(s) for *: '{type(self)}' and "
                f"'{type(other)}'")

    def __len__(self) -> int:
        r"""Length operator override for :obj:`BaseMessage`.

        Returns:
            int: The length of the content.
        """
        return len(self.content)

    def __contains__(self, item: str) -> bool:
        r"""Contains operator override for :obj:`BaseMessage`.

        Args:
            item (str): The item to check for containment.

        Returns:
            bool: :obj:`True` if the item is contained in the content,
                :obj:`False` otherwise.
        """
        return item in self.content

    def extract_text_and_code_prompts(
            self) -> Tuple[List[TextPrompt], List[CodePrompt]]:
        r"""Extract text and code prompts from the message content.

        Code blocks are delimited by lines whose stripped prefix is ```` ``` ````;
        the text on the opening fence line after the backticks is taken as
        the code type (e.g. ``python``).

        Returns:
            Tuple[List[TextPrompt], List[CodePrompt]]: A tuple containing a
                list of text prompts and a list of code prompts extracted
                from the content.
        """
        text_prompts: List[TextPrompt] = []
        code_prompts: List[CodePrompt] = []

        lines = self.content.split("\n")
        idx = 0
        start_idx = 0
        while idx < len(lines):
            # Consume text until the next opening code fence (or EOF).
            while idx < len(lines) and (
                    not lines[idx].lstrip().startswith("```")):
                idx += 1
            text = "\n".join(lines[start_idx:idx]).strip()
            text_prompts.append(TextPrompt(text))

            if idx >= len(lines):
                break

            code_type = lines[idx].strip()[3:].strip()
            idx += 1
            start_idx = idx
            # Bug fix: guard against an unterminated code fence. Without the
            # bounds check this loop raised IndexError when the closing ```
            # was missing; now the remainder of the message becomes the code.
            while idx < len(lines) and (
                    not lines[idx].lstrip().startswith("```")):
                idx += 1
            code = "\n".join(lines[start_idx:idx]).strip()
            code_prompts.append(CodePrompt(code, code_type=code_type))

            idx += 1
            start_idx = idx

        return text_prompts, code_prompts

    def to_openai_message(
        self,
        role_at_backend: OpenAIBackendRole,
    ) -> OpenAIMessage:
        r"""Converts the message to an :obj:`OpenAIMessage` object.

        Args:
            role_at_backend (OpenAIBackendRole): The role of the message in
                OpenAI chat system.

        Returns:
            OpenAIMessage: The converted :obj:`OpenAIMessage` object.

        Raises:
            ValueError: If `role_at_backend` is not SYSTEM, USER, or
                ASSISTANT.
        """
        if role_at_backend == OpenAIBackendRole.SYSTEM:
            return self.to_openai_system_message()
        elif role_at_backend == OpenAIBackendRole.USER:
            return self.to_openai_user_message()
        elif role_at_backend == OpenAIBackendRole.ASSISTANT:
            return self.to_openai_assistant_message()
        else:
            raise ValueError(f"Unsupported role: {role_at_backend}.")

    def to_openai_system_message(self) -> OpenAISystemMessage:
        r"""Converts the message to an :obj:`OpenAISystemMessage` object.

        Returns:
            OpenAISystemMessage: The converted :obj:`OpenAISystemMessage`
                object.
        """
        return {"role": "system", "content": self.content}

    def to_openai_user_message(self) -> OpenAIUserMessage:
        r"""Converts the message to an :obj:`OpenAIUserMessage` object.

        Returns:
            OpenAIUserMessage: The converted :obj:`OpenAIUserMessage` object.
        """
        return {"role": "user", "content": self.content}

    def to_openai_assistant_message(self) -> OpenAIAssistantMessage:
        r"""Converts the message to an :obj:`OpenAIAssistantMessage` object.

        Returns:
            OpenAIAssistantMessage: The converted :obj:`OpenAIAssistantMessage`
                object.
        """
        return {"role": "assistant", "content": self.content}

    def to_dict(self) -> Dict:
        r"""Converts the message to a dictionary.

        Note that `meta_dict` entries are flattened into the top level, so a
        meta key named ``role_name``/``role_type`` would shadow those fields
        (while ``content`` always wins over a meta entry of the same name).

        Returns:
            dict: The converted dictionary.
        """
        return {
            "role_name": self.role_name,
            "role_type": self.role_type.name,
            **(self.meta_dict or {}),
            "content": self.content,
        }