camel-ai 0.1.1 (camel_ai-0.1.1-py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of camel-ai might be problematic.

Files changed (75)
  1. camel/__init__.py +30 -0
  2. camel/agents/__init__.py +40 -0
  3. camel/agents/base.py +29 -0
  4. camel/agents/chat_agent.py +539 -0
  5. camel/agents/critic_agent.py +179 -0
  6. camel/agents/embodied_agent.py +138 -0
  7. camel/agents/role_assignment_agent.py +117 -0
  8. camel/agents/task_agent.py +382 -0
  9. camel/agents/tool_agents/__init__.py +20 -0
  10. camel/agents/tool_agents/base.py +40 -0
  11. camel/agents/tool_agents/hugging_face_tool_agent.py +203 -0
  12. camel/configs.py +159 -0
  13. camel/embeddings/__init__.py +20 -0
  14. camel/embeddings/base.py +65 -0
  15. camel/embeddings/openai_embedding.py +74 -0
  16. camel/functions/__init__.py +27 -0
  17. camel/functions/base_io_functions.py +261 -0
  18. camel/functions/math_functions.py +61 -0
  19. camel/functions/openai_function.py +88 -0
  20. camel/functions/search_functions.py +309 -0
  21. camel/functions/unstructured_io_fuctions.py +616 -0
  22. camel/functions/weather_functions.py +136 -0
  23. camel/generators.py +263 -0
  24. camel/human.py +130 -0
  25. camel/memories/__init__.py +28 -0
  26. camel/memories/base.py +75 -0
  27. camel/memories/chat_history_memory.py +111 -0
  28. camel/memories/context_creators/__init__.py +18 -0
  29. camel/memories/context_creators/base.py +72 -0
  30. camel/memories/context_creators/score_based.py +130 -0
  31. camel/memories/records.py +92 -0
  32. camel/messages/__init__.py +38 -0
  33. camel/messages/base.py +223 -0
  34. camel/messages/func_message.py +106 -0
  35. camel/models/__init__.py +26 -0
  36. camel/models/base_model.py +110 -0
  37. camel/models/model_factory.py +59 -0
  38. camel/models/open_source_model.py +144 -0
  39. camel/models/openai_model.py +103 -0
  40. camel/models/stub_model.py +106 -0
  41. camel/prompts/__init__.py +38 -0
  42. camel/prompts/ai_society.py +121 -0
  43. camel/prompts/base.py +227 -0
  44. camel/prompts/code.py +111 -0
  45. camel/prompts/evaluation.py +40 -0
  46. camel/prompts/misalignment.py +84 -0
  47. camel/prompts/prompt_templates.py +117 -0
  48. camel/prompts/role_description_prompt_template.py +53 -0
  49. camel/prompts/solution_extraction.py +44 -0
  50. camel/prompts/task_prompt_template.py +56 -0
  51. camel/prompts/translation.py +42 -0
  52. camel/responses/__init__.py +18 -0
  53. camel/responses/agent_responses.py +42 -0
  54. camel/societies/__init__.py +20 -0
  55. camel/societies/babyagi_playing.py +254 -0
  56. camel/societies/role_playing.py +456 -0
  57. camel/storages/__init__.py +23 -0
  58. camel/storages/key_value_storages/__init__.py +23 -0
  59. camel/storages/key_value_storages/base.py +57 -0
  60. camel/storages/key_value_storages/in_memory.py +51 -0
  61. camel/storages/key_value_storages/json.py +97 -0
  62. camel/terminators/__init__.py +23 -0
  63. camel/terminators/base.py +44 -0
  64. camel/terminators/response_terminator.py +118 -0
  65. camel/terminators/token_limit_terminator.py +55 -0
  66. camel/types/__init__.py +54 -0
  67. camel/types/enums.py +176 -0
  68. camel/types/openai_types.py +39 -0
  69. camel/utils/__init__.py +47 -0
  70. camel/utils/commons.py +243 -0
  71. camel/utils/python_interpreter.py +435 -0
  72. camel/utils/token_counting.py +220 -0
  73. camel_ai-0.1.1.dist-info/METADATA +311 -0
  74. camel_ai-0.1.1.dist-info/RECORD +75 -0
  75. camel_ai-0.1.1.dist-info/WHEEL +4 -0
camel/storages/key_value_storages/json.py ADDED
@@ -0,0 +1,97 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+
+ import json
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+
+ from camel.storages.key_value_storages import BaseKeyValueStorage
+ from camel.types import (
+     ModelType,
+     OpenAIBackendRole,
+     RoleType,
+     TaskType,
+     VectorDistance,
+ )
+
+
+ class _CamelJSONEncoder(json.JSONEncoder):
+     r"""A custom JSON encoder for serializing specifically enumerated types.
+     Ensures enumerated types can be stored in and retrieved from JSON format.
+     """
+     CAMEL_ENUMS = {
+         "RoleType": RoleType,
+         "TaskType": TaskType,
+         "ModelType": ModelType,
+         "OpenAIBackendRole": OpenAIBackendRole,
+         "VectorDistance": VectorDistance,
+     }
+
+     def default(self, obj) -> Any:
+         if type(obj) in self.CAMEL_ENUMS.values():
+             return {"__enum__": str(obj)}
+         # Let the base class default method raise the TypeError
+         return json.JSONEncoder.default(self, obj)
+
+
+ class JsonStorage(BaseKeyValueStorage):
+     r"""A concrete implementation of the :obj:`BaseKeyValueStorage` using JSON
+     files. Allows for persistent storage of records in a human-readable format.
+
+     Args:
+         path (Path, optional): Path to the desired JSON file. If `None`, a
+             default path `./chat_history.json` will be used.
+             (default: :obj:`None`)
+     """
+
+     def __init__(self, path: Optional[Path] = None) -> None:
+         self.json_path = path or Path("./chat_history.json")
+         self.json_path.touch()
+
+     def _json_object_hook(self, d) -> Any:
+         if "__enum__" in d:
+             name, member = d["__enum__"].split(".")
+             return getattr(_CamelJSONEncoder.CAMEL_ENUMS[name], member)
+         else:
+             return d
+
+     def save(self, records: List[Dict[str, Any]]) -> None:
+         r"""Saves a batch of records to the key-value storage system.
+
+         Args:
+             records (List[Dict[str, Any]]): A list of dictionaries, where each
+                 dictionary represents a unique record to be stored.
+         """
+         with self.json_path.open("a") as f:
+             f.writelines(
+                 [json.dumps(r, cls=_CamelJSONEncoder) + "\n" for r in records])
+
+     def load(self) -> List[Dict[str, Any]]:
+         r"""Loads all stored records from the key-value storage system.
+
+         Returns:
+             List[Dict[str, Any]]: A list of dictionaries, where each dictionary
+                 represents a stored record.
+         """
+         with self.json_path.open("r") as f:
+             return [
+                 json.loads(r, object_hook=self._json_object_hook)
+                 for r in f.readlines()
+             ]
+
+     def clear(self) -> None:
+         r"""Removes all records from the key-value storage system.
+         """
+         with self.json_path.open("w"):
+             pass
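
For orientation, here is a minimal usage sketch of the `JsonStorage` class added above. It is not part of the diff; the file path and record contents are illustrative.

from pathlib import Path

from camel.storages.key_value_storages.json import JsonStorage
from camel.types import RoleType

# Records are stored one JSON object per line in an illustrative file.
storage = JsonStorage(Path("./example_history.json"))

# Enum members survive the round trip via _CamelJSONEncoder / _json_object_hook.
storage.save([{"role": RoleType.USER, "content": "Hello"}])
print(storage.load())  # [{'role': <RoleType.USER: 'user'>, 'content': 'Hello'}]
storage.clear()
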
camel/terminators/__init__.py ADDED
@@ -0,0 +1,23 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ from .base import BaseTerminator
+ from .response_terminator import ResponseWordsTerminator, ResponseTerminator
+ from .token_limit_terminator import TokenLimitTerminator
+
+ __all__ = [
+     'BaseTerminator',
+     'ResponseTerminator',
+     'ResponseWordsTerminator',
+     'TokenLimitTerminator',
+ ]
camel/terminators/base.py ADDED
@@ -0,0 +1,44 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ from abc import ABC, abstractmethod
+ from typing import List, Optional, Tuple
+
+ from camel.messages import BaseMessage
+
+
+ class BaseTerminator(ABC):
+
+     def __init__(self, *args, **kwargs) -> None:
+         self._terminated: bool = False
+         self._termination_reason: Optional[str] = None
+
+     @abstractmethod
+     def is_terminated(self, *args, **kwargs) -> Tuple[bool, Optional[str]]:
+         pass
+
+     @abstractmethod
+     def reset(self):
+         pass
+
+
+ class ResponseTerminator(BaseTerminator):
+
+     @abstractmethod
+     def is_terminated(
+             self, messages: List[BaseMessage]) -> Tuple[bool, Optional[str]]:
+         pass
+
+     @abstractmethod
+     def reset(self):
+         pass
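
To illustrate the contract these abstract classes define, here is a minimal sketch of a custom terminator. It is not part of the diff, and the class name is hypothetical.

from typing import Optional, Tuple

from camel.terminators.base import BaseTerminator


class NeverTerminate(BaseTerminator):
    """Hypothetical terminator that never stops the agent."""

    def is_terminated(self, *args, **kwargs) -> Tuple[bool, Optional[str]]:
        # Simply report the current state; never flips to terminated.
        return self._terminated, self._termination_reason

    def reset(self):
        self._terminated = False
        self._termination_reason = None
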
camel/terminators/response_terminator.py ADDED
@@ -0,0 +1,118 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ from collections import defaultdict
+ from typing import Dict, List, Optional, Tuple
+
+ from camel.messages import BaseMessage
+ from camel.types import TerminationMode
+
+ from .base import ResponseTerminator
+
+
+ class ResponseWordsTerminator(ResponseTerminator):
+     r"""Terminates the agent when any message of the response lets a
+     pre-set word reach its occurrence threshold.
+
+     Args:
+         words_dict (dict): Dictionary mapping each word to its occurrence
+             threshold.
+         case_sensitive (bool): Whether to count the words case-sensitively.
+             (default: :obj:`False`)
+         mode (TerminationMode): Whether to terminate the agent when any or
+             all of the pre-set words reach their thresholds.
+             (default: :obj:`TerminationMode.ANY`)
+     """
+
+     def __init__(self, words_dict: Dict[str, int],
+                  case_sensitive: bool = False,
+                  mode: TerminationMode = TerminationMode.ANY):
+         super().__init__()
+         self.words_dict = words_dict
+         self.case_sensitive = case_sensitive
+         self.mode = mode
+         self._word_count_dict: List[Dict[str, int]] = []
+         self._validate()
+
+     def _validate(self):
+         if len(self.words_dict) == 0:
+             raise ValueError("`words_dict` cannot be empty")
+         for word in self.words_dict:
+             threshold = self.words_dict[word]
+             if threshold <= 0:
+                 raise ValueError(f"Threshold for word `{word}` should "
+                                  f"be larger than 0, got `{threshold}`")
+
+     def is_terminated(
+             self, messages: List[BaseMessage]) -> Tuple[bool, Optional[str]]:
+         r"""Decides whether to terminate the agent by checking whether the
+         occurrences of the specified words have reached their preset
+         thresholds.
+
+         Args:
+             messages (list): List of :obj:`BaseMessage` from a response.
+
+         Returns:
+             tuple: A tuple containing whether the agent should be
+                 terminated and a string describing the termination reason.
+         """
+         if self._terminated:
+             return True, self._termination_reason
+
+         # Keep one word-count dictionary per message of the response.
+         for i in range(len(messages)):
+             if i >= len(self._word_count_dict):
+                 self._word_count_dict.append(defaultdict(int))
+
+         for word in self.words_dict:
+             special_word = word if self.case_sensitive else word.lower()
+             for i, message in enumerate(messages):
+                 if self.case_sensitive:
+                     content = message.content
+                 else:
+                     content = message.content.lower()
+                 if special_word in content:
+                     self._word_count_dict[i][word] += 1
+
+         num_reached: List[int] = []
+         all_reasons: List[List[str]] = []
+         for i in range(len(self._word_count_dict)):
+             reached = 0
+             reasons: List[str] = []
+             for word, value in self._word_count_dict[i].items():
+                 if value >= self.words_dict[word]:
+                     reached += 1
+                     reason = (f"Word `{word}` appears {value} times in "
+                               f"message {i + 1} of the response, which has "
+                               f"reached the termination threshold "
+                               f"{self.words_dict[word]}.")
+                     reasons.append(reason)
+             all_reasons.append(reasons)
+             num_reached.append(reached)
+
+         for i, reached in enumerate(num_reached):
+             if self.mode == TerminationMode.ANY:
+                 if reached > 0:
+                     self._terminated = True
+                     self._termination_reason = "\n".join(all_reasons[i])
+             elif self.mode == TerminationMode.ALL:
+                 if reached >= len(self.words_dict):
+                     self._terminated = True
+                     self._termination_reason = "\n".join(all_reasons[i])
+             else:
+                 raise ValueError(f"Unsupported termination mode "
+                                  f"`{self.mode}`")
+         return self._terminated, self._termination_reason
+
+     def reset(self):
+         self._terminated = False
+         self._termination_reason = None
+         # One counter dict per message is re-created lazily in
+         # `is_terminated`, so reset back to an empty list.
+         self._word_count_dict = []
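
A minimal usage sketch of `ResponseWordsTerminator` follows. It is not part of the diff; the `BaseMessage` constructor arguments are assumed from `camel/messages/base.py` and the word and threshold values are illustrative.

from camel.messages import BaseMessage
from camel.terminators import ResponseWordsTerminator
from camel.types import RoleType, TerminationMode

# Stop once the word "goodbye" has shown up in three checked responses.
terminator = ResponseWordsTerminator(words_dict={"goodbye": 3},
                                     mode=TerminationMode.ANY)

msg = BaseMessage(role_name="assistant", role_type=RoleType.ASSISTANT,
                  meta_dict=None, content="Goodbye, and thanks!")

for _ in range(3):
    terminated, reason = terminator.is_terminated([msg])

print(terminated)  # True once the threshold of 3 is reached
print(reason)
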
camel/terminators/token_limit_terminator.py ADDED
@@ -0,0 +1,55 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ from typing import Optional, Tuple
+
+ from camel.terminators import BaseTerminator
+
+
+ class TokenLimitTerminator(BaseTerminator):
+     r"""Terminates the agent once the number of used tokens reaches the
+     token limit threshold.
+
+     Args:
+         token_limit (int): Token limit threshold.
+     """
+
+     def __init__(self, token_limit: int):
+         super().__init__()
+         self.token_limit = token_limit
+         self._validate()
+
+     def _validate(self):
+         if self.token_limit <= 0:
+             raise ValueError(f"`token_limit` should be a "
+                              f"value larger than 0, got {self.token_limit}.")
+
+     def is_terminated(self, num_tokens: int) -> Tuple[bool, Optional[str]]:
+         r"""Decides whether to terminate the agent by checking whether the
+         number of used tokens has reached the token limit.
+
+         Args:
+             num_tokens (int): Number of tokens.
+
+         Returns:
+             tuple: A tuple containing whether the agent should be
+                 terminated and a string describing the termination reason.
+         """
+         if self._terminated:
+             return True, self._termination_reason
+         if num_tokens >= self.token_limit:
+             self._terminated = True
+             self._termination_reason = "max_tokens_exceeded"
+         return self._terminated, self._termination_reason
+
+     def reset(self):
+         self._terminated = False
+         self._termination_reason = None
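
A minimal usage sketch of `TokenLimitTerminator` follows. It is not part of the diff; the token counts are illustrative.

from camel.terminators import TokenLimitTerminator

terminator = TokenLimitTerminator(token_limit=4096)

print(terminator.is_terminated(num_tokens=1200))  # (False, None)
print(terminator.is_terminated(num_tokens=5000))  # (True, 'max_tokens_exceeded')

terminator.reset()  # ready to monitor a fresh session
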
camel/types/__init__.py ADDED
@@ -0,0 +1,54 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ from .enums import (
+     RoleType,
+     ModelType,
+     TaskType,
+     TerminationMode,
+     OpenAIBackendRole,
+     EmbeddingModelType,
+     VectorDistance,
+ )
+ from .openai_types import (
+     Choice,
+     ChatCompletion,
+     ChatCompletionChunk,
+     ChatCompletionMessage,
+     ChatCompletionMessageParam,
+     ChatCompletionSystemMessageParam,
+     ChatCompletionUserMessageParam,
+     ChatCompletionAssistantMessageParam,
+     ChatCompletionFunctionMessageParam,
+     CompletionUsage,
+ )
+
+ __all__ = [
+     'RoleType',
+     'ModelType',
+     'TaskType',
+     'TerminationMode',
+     'OpenAIBackendRole',
+     'EmbeddingModelType',
+     'VectorDistance',
+     'Choice',
+     'ChatCompletion',
+     'ChatCompletionChunk',
+     'ChatCompletionMessage',
+     'ChatCompletionMessageParam',
+     'ChatCompletionSystemMessageParam',
+     'ChatCompletionUserMessageParam',
+     'ChatCompletionAssistantMessageParam',
+     'ChatCompletionFunctionMessageParam',
+     'CompletionUsage',
+ ]
camel/types/enums.py ADDED
@@ -0,0 +1,176 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ import re
+ from enum import Enum
+
+
+ class RoleType(Enum):
+     ASSISTANT = "assistant"
+     USER = "user"
+     CRITIC = "critic"
+     EMBODIMENT = "embodiment"
+     DEFAULT = "default"
+
+
+ class ModelType(Enum):
+     GPT_3_5_TURBO = "gpt-3.5-turbo-1106"
+     GPT_3_5_TURBO_16K = "gpt-3.5-turbo-1106"
+     GPT_4 = "gpt-4"
+     GPT_4_32K = "gpt-4-32k"
+     GPT_4_TURBO = "gpt-4-1106-preview"
+     GPT_4_TURBO_VISION = "gpt-4-vision-preview"
+
+     STUB = "stub"
+
+     LLAMA_2 = "llama-2"
+     VICUNA = "vicuna"
+     VICUNA_16K = "vicuna-16k"
+
+     @property
+     def value_for_tiktoken(self) -> str:
+         return self.value if self is not ModelType.STUB else "gpt-3.5-turbo"
+
+     @property
+     def is_openai(self) -> bool:
+         r"""Returns whether this type of model is an OpenAI-released model."""
+         return self in {
+             ModelType.GPT_3_5_TURBO,
+             ModelType.GPT_3_5_TURBO_16K,
+             ModelType.GPT_4,
+             ModelType.GPT_4_32K,
+             ModelType.GPT_4_TURBO,
+             ModelType.GPT_4_TURBO_VISION,
+         }
+
+     @property
+     def is_open_source(self) -> bool:
+         r"""Returns whether this type of model is open-source."""
+         return self in {
+             ModelType.LLAMA_2,
+             ModelType.VICUNA,
+             ModelType.VICUNA_16K,
+         }
+
+     @property
+     def token_limit(self) -> int:
+         r"""Returns the maximum token limit for a given model.
+
+         Returns:
+             int: The maximum token limit for the given model.
+         """
+         if self is ModelType.GPT_3_5_TURBO:
+             return 16385
+         elif self is ModelType.GPT_3_5_TURBO_16K:
+             return 16385
+         elif self is ModelType.GPT_4:
+             return 8192
+         elif self is ModelType.GPT_4_32K:
+             return 32768
+         elif self is ModelType.GPT_4_TURBO:
+             return 128000
+         elif self is ModelType.GPT_4_TURBO_VISION:
+             return 128000
+         elif self is ModelType.STUB:
+             return 4096
+         elif self is ModelType.LLAMA_2:
+             return 4096
+         elif self is ModelType.VICUNA:
+             # reference: https://lmsys.org/blog/2023-03-30-vicuna/
+             return 2048
+         elif self is ModelType.VICUNA_16K:
+             return 16384
+         else:
+             raise ValueError("Unknown model type")
+
+     def validate_model_name(self, model_name: str) -> bool:
+         r"""Checks whether the model type and the model name match.
+
+         Args:
+             model_name (str): The name of the model, e.g. "vicuna-7b-v1.5".
+
+         Returns:
+             bool: Whether the model type matches the model name.
+         """
+         if self is ModelType.VICUNA:
+             pattern = r'^vicuna-\d+b-v\d+\.\d+$'
+             return bool(re.match(pattern, model_name))
+         elif self is ModelType.VICUNA_16K:
+             pattern = r'^vicuna-\d+b-v\d+\.\d+-16k$'
+             return bool(re.match(pattern, model_name))
+         elif self is ModelType.LLAMA_2:
+             return (self.value in model_name.lower()
+                     or "llama2" in model_name.lower())
+         else:
+             return self.value in model_name.lower()
+
+
+ class EmbeddingModelType(Enum):
+     ADA_2 = "text-embedding-ada-002"
+     ADA_1 = "text-embedding-ada-001"
+     BABBAGE_1 = "text-embedding-babbage-001"
+     CURIE_1 = "text-embedding-curie-001"
+     DAVINCI_1 = "text-embedding-davinci-001"
+
+     @property
+     def is_openai(self) -> bool:
+         r"""Returns whether this type of model is an OpenAI-released model."""
+         return self in {
+             EmbeddingModelType.ADA_2,
+             EmbeddingModelType.ADA_1,
+             EmbeddingModelType.BABBAGE_1,
+             EmbeddingModelType.CURIE_1,
+             EmbeddingModelType.DAVINCI_1,
+         }
+
+     @property
+     def output_dim(self) -> int:
+         if self is EmbeddingModelType.ADA_2:
+             return 1536
+         elif self is EmbeddingModelType.ADA_1:
+             return 1024
+         elif self is EmbeddingModelType.BABBAGE_1:
+             return 2048
+         elif self is EmbeddingModelType.CURIE_1:
+             return 4096
+         elif self is EmbeddingModelType.DAVINCI_1:
+             return 12288
+         else:
+             raise ValueError(f"Unknown model type {self}.")
+
+
+ class TaskType(Enum):
+     AI_SOCIETY = "ai_society"
+     CODE = "code"
+     MISALIGNMENT = "misalignment"
+     TRANSLATION = "translation"
+     EVALUATION = "evaluation"
+     SOLUTION_EXTRACTION = "solution_extraction"
+     ROLE_DESCRIPTION = "role_description"
+     DEFAULT = "default"
+
+
+ class VectorDistance(Enum):
+     DOT = 1
+     COSINE = 2
+     EUCLIDEAN = 3
+
+
+ class OpenAIBackendRole(Enum):
+     ASSISTANT = "assistant"
+     SYSTEM = "system"
+     USER = "user"
+     FUNCTION = "function"
+
+
+ class TerminationMode(Enum):
+     ANY = "any"
+     ALL = "all"
camel/types/openai_types.py ADDED
@@ -0,0 +1,39 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # isort: skip_file
+ from openai.types.chat.chat_completion import ChatCompletion, Choice
+ from openai.types.chat.chat_completion_assistant_message_param import (
+     ChatCompletionAssistantMessageParam, )
+ from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
+ from openai.types.chat.chat_completion_function_message_param import (
+     ChatCompletionFunctionMessageParam, )
+ from openai.types.chat.chat_completion_message import ChatCompletionMessage
+ from openai.types.chat.chat_completion_message_param import (
+     ChatCompletionMessageParam, )
+ from openai.types.chat.chat_completion_system_message_param import (
+     ChatCompletionSystemMessageParam, )
+ from openai.types.chat.chat_completion_user_message_param import (
+     ChatCompletionUserMessageParam, )
+ from openai.types.completion_usage import CompletionUsage
+
+ Choice = Choice
+ ChatCompletion = ChatCompletion
+ ChatCompletionChunk = ChatCompletionChunk
+ ChatCompletionMessage = ChatCompletionMessage
+ ChatCompletionMessageParam = ChatCompletionMessageParam
+ ChatCompletionSystemMessageParam = ChatCompletionSystemMessageParam
+ ChatCompletionUserMessageParam = ChatCompletionUserMessageParam
+ ChatCompletionAssistantMessageParam = ChatCompletionAssistantMessageParam
+ ChatCompletionFunctionMessageParam = ChatCompletionFunctionMessageParam
+ CompletionUsage = CompletionUsage
camel/utils/__init__.py ADDED
@@ -0,0 +1,47 @@
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+ from .python_interpreter import PythonInterpreter
+ from .commons import (
+     openai_api_key_required,
+     print_text_animated,
+     get_prompt_template_key_words,
+     get_first_int,
+     download_tasks,
+     parse_doc,
+     get_task_list,
+     check_server_running,
+ )
+ from .token_counting import (
+     get_model_encoding,
+     BaseTokenCounter,
+     OpenAITokenCounter,
+     OpenSourceTokenCounter,
+ )
+
+ __all__ = [
+     'openai_api_key_required',
+     'print_text_animated',
+     'get_prompt_template_key_words',
+     'get_first_int',
+     'download_tasks',
+     'PythonInterpreter',
+     'parse_doc',
+     'get_task_list',
+     'get_model_encoding',
+     'check_server_running',
+     'BaseTokenCounter',
+     'OpenAITokenCounter',
+     'OpenSourceTokenCounter',
+ ]
+ ]