camel-ai 0.2.16__py3-none-any.whl → 0.2.17__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of camel-ai might be problematic.
- camel/__init__.py +1 -1
- camel/agents/chat_agent.py +18 -4
- camel/agents/multi_hop_generator_agent.py +85 -0
- camel/agents/programmed_agent_instruction.py +148 -0
- camel/benchmarks/__init__.py +2 -0
- camel/benchmarks/apibank.py +5 -0
- camel/benchmarks/apibench.py +8 -4
- camel/benchmarks/gaia.py +2 -2
- camel/benchmarks/ragbench.py +333 -0
- camel/bots/__init__.py +1 -1
- camel/bots/discord/__init__.py +26 -0
- camel/bots/discord/discord_app.py +384 -0
- camel/bots/discord/discord_installation.py +64 -0
- camel/bots/discord/discord_store.py +160 -0
- camel/configs/__init__.py +3 -0
- camel/configs/anthropic_config.py +17 -15
- camel/configs/internlm_config.py +60 -0
- camel/data_collector/base.py +5 -5
- camel/data_collector/sharegpt_collector.py +2 -2
- camel/datagen/self_instruct/self_instruct.py +1 -1
- camel/datagen/self_instruct/templates.py +12 -14
- camel/loaders/__init__.py +2 -0
- camel/loaders/panda_reader.py +337 -0
- camel/messages/__init__.py +10 -4
- camel/messages/conversion/conversation_models.py +5 -0
- camel/messages/func_message.py +30 -22
- camel/models/__init__.py +2 -0
- camel/models/anthropic_model.py +1 -22
- camel/models/cohere_model.py +8 -0
- camel/models/gemini_model.py +10 -1
- camel/models/internlm_model.py +143 -0
- camel/models/mistral_model.py +14 -7
- camel/models/model_factory.py +3 -0
- camel/models/reward/__init__.py +2 -0
- camel/models/reward/skywork_model.py +88 -0
- camel/synthetic_datagen/source2synth/data_processor.py +373 -0
- camel/synthetic_datagen/source2synth/models.py +68 -0
- camel/synthetic_datagen/source2synth/user_data_processor_config.py +73 -0
- camel/toolkits/google_scholar_toolkit.py +9 -0
- camel/types/__init__.py +4 -2
- camel/types/enums.py +34 -1
- camel/types/openai_types.py +6 -4
- camel/types/unified_model_type.py +5 -0
- camel/utils/token_counting.py +3 -3
- {camel_ai-0.2.16.dist-info → camel_ai-0.2.17.dist-info}/METADATA +158 -187
- {camel_ai-0.2.16.dist-info → camel_ai-0.2.17.dist-info}/RECORD +48 -35
- {camel_ai-0.2.16.dist-info → camel_ai-0.2.17.dist-info}/WHEEL +1 -1
- camel/bots/discord_app.py +0 -138
- {camel_ai-0.2.16.dist-info → camel_ai-0.2.17.dist-info}/LICENSE +0 -0
camel/configs/internlm_config.py
ADDED

@@ -0,0 +1,60 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+
+from typing import Optional, Union
+
+from camel.configs.base_config import BaseConfig
+
+
+class InternLMConfig(BaseConfig):
+    r"""Defines the parameters for generating chat completions using the
+    InternLM API. You can refer to the following link for more details:
+    https://internlm.intern-ai.org.cn/api/document
+
+    Args:
+        stream (bool, optional): Whether to stream the response.
+            (default: :obj:`False`)
+        temperature (float, optional): Controls the diversity and focus of
+            the generated results. Lower values make the output more focused,
+            while higher values make it more diverse. (default: :obj:`0.3`)
+        top_p (float, optional): Controls the diversity and focus of the
+            generated results. Higher values make the output more diverse,
+            while lower values make it more focused. (default: :obj:`0.9`)
+        max_tokens (Union[int, NotGiven], optional): Allows the model to
+            generate the maximum number of tokens.
+            (default: :obj:`NOT_GIVEN`)
+        tools (list, optional): Specifies an array of tools that the model can
+            call. It can contain one or more tool objects. During a function
+            call process, the model will select one tool from the array.
+            (default: :obj:`None`)
+        tool_choice (Union[dict[str, str], str], optional): Controls which (if
+            any) tool is called by the model. :obj:`"none"` means the model
+            will not call any tool and instead generates a message.
+            :obj:`"auto"` means the model can pick between generating a
+            message or calling one or more tools. :obj:`"required"` means the
+            model must call one or more tools. Specifying a particular tool
+            via {"type": "function", "function": {"name": "my_function"}}
+            forces the model to call that tool. :obj:`"none"` is the default
+            when no tools are present. :obj:`"auto"` is the default if tools
+            are present.
+    """
+
+    stream: bool = False
+    temperature: float = 0.8
+    top_p: float = 0.9
+    max_tokens: Optional[int] = None
+    tool_choice: Optional[Union[dict[str, str], str]] = None
+
+
+INTERNLM_API_PARAMS = {param for param in InternLMConfig.model_fields.keys()}
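For orientation, here is a minimal sketch of how the new config class might be constructed. The field names and defaults come from the diff above; the re-export from `camel.configs` is assumed from the `camel/configs/__init__.py` entry in the file list, and the concrete values are illustrative.

```python
from camel.configs import InternLMConfig  # re-export assumed from configs/__init__.py

# Field names mirror the class definition above; the values are examples only.
config = InternLMConfig(
    stream=False,
    temperature=0.8,
    top_p=0.9,
    max_tokens=1024,
    tool_choice="auto",
)

# BaseConfig subclasses are pydantic models (note model_fields above), so the
# parameters can be dumped to a plain dict for the underlying API client.
print(config.model_dump())
```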
camel/data_collector/base.py
CHANGED

@@ -27,7 +27,7 @@ class CollectorData:
         self,
         id: UUID,
         name: str,
-        role: Literal["user", "assistant", "system", "
+        role: Literal["user", "assistant", "system", "tool"],
         message: Optional[str] = None,
         function_call: Optional[Dict[str, Any]] = None,
     ) -> None:
@@ -52,7 +52,7 @@ class CollectorData:
             ValueError: If neither message nor function call is provided.

         """
-        if role not in ["user", "assistant", "system", "
+        if role not in ["user", "assistant", "system", "tool"]:
             raise ValueError(f"Role {role} not supported")
         if role == "system" and function_call:
             raise ValueError("System role cannot have function call")
@@ -82,7 +82,7 @@ class CollectorData:
             name=name,
             role=context["role"],
             message=context["content"],
-            function_call=context.get("
+            function_call=context.get("tool_calls", None),
         )


@@ -98,7 +98,7 @@ class BaseDataCollector(ABC):

     def step(
         self,
-        role: Literal["user", "assistant", "system", "
+        role: Literal["user", "assistant", "system", "tool"],
         name: Optional[str] = None,
         message: Optional[str] = None,
         function_call: Optional[Dict[str, Any]] = None,
@@ -106,7 +106,7 @@ class BaseDataCollector(ABC):
         r"""Record a message.

         Args:
-            role (Literal["user", "assistant", "system", "
+            role (Literal["user", "assistant", "system", "tool"]):
                 The role of the message.
             name (Optional[str], optional): The name of the agent.
                 (default: :obj:`None`)
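As a quick illustration of the widened literal, a CollectorData record can now carry the "tool" role. The class and argument names come from the hunks above; the concrete values are invented for the example.

```python
from uuid import uuid4

from camel.data_collector.base import CollectorData  # module path per the file list

# "tool" is now accepted alongside "user", "assistant", and "system".
record = CollectorData(
    id=uuid4(),
    name="search_tool",          # illustrative tool name
    role="tool",
    message='{"results": []}',   # illustrative tool output
)
```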
camel/data_collector/sharegpt_collector.py
CHANGED

@@ -131,7 +131,7 @@ class ShareGPTDataCollector(BaseDataCollector):
                 conversations.append(
                     {"from": "gpt", "value": message.message}
                 )
-            elif role == "function":
+            elif role == "function" or role == "tool":
                 conversations.append(
                     {
                         "from": "observation",
@@ -182,7 +182,7 @@ class ShareGPTDataCollector(BaseDataCollector):
             if message.function_call:
                 context.append(prefix + json.dumps(message.function_call))

-            elif role == "function":
+            elif role == "function" or role == "tool":
                 context.append(prefix + json.dumps(message.message))  # type: ignore[attr-defined]
             else:
                 context.append(prefix + str(message.message))
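The practical effect is that tool results (role "tool", or the older "function") are emitted as ShareGPT "observation" turns. A rough sketch of the resulting conversation list, with the "from"/"value" keys taken from the hunk above and the values invented:

```python
# Illustrative only: the entry shapes mirror what the collector appends above.
conversations = [
    {"from": "human", "value": "What is the weather in Paris?"},
    {"from": "gpt", "value": "Let me call the weather tool."},
    {"from": "observation", "value": '{"temperature_c": 21}'},  # tool/function result
    {"from": "gpt", "value": "It is 21 degrees Celsius in Paris."},
]
```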
camel/datagen/self_instruct/self_instruct.py
CHANGED

@@ -365,13 +365,13 @@ class SelfInstructPipeline:
         and instances.
         """
         while len(self.machine_tasks) < self.num_machine_instructions:
+            prompt, instruction = self.generate_machine_instruction()
             existing_instructions = [
                 t["instruction"] for t in self.human_tasks
             ] + [t["instruction"] for t in self.machine_tasks]
             for f in self.instruction_filter.filters:
                 if isinstance(f, RougeSimilarityFilter):
                     f.existing_instructions = existing_instructions
-            prompt, instruction = self.generate_machine_instruction()
             if self.instruction_filter.filter(prompt, instruction):
                 instruction_dict = {
                     "id": f"machine_task_{len(self.machine_tasks) + 1}",
camel/datagen/self_instruct/templates.py
CHANGED

@@ -269,22 +269,20 @@ class SelfInstructTemplates:
 '''

     input_first_template_for_gen = '''You will be given a task,
-
+Your job is to generate at most two example instances demonstrating how to
 perform this task. For each instance:
-- If the task requires input (as an actual
-
-skip the input.
-- Then provide the correct output.
+- If the task requires input (as an actual example of the task), provide it.
+- If the task can be answered directly without requiring input, omit the input section.

-
-
-
-
-
-
-
-
-
+Example 1
+Input: [Provide input here if needed, otherwise omit this section]
+Output: [Provide the correct output]
+
+Example 2
+Input: [Provide input here if needed, otherwise omit this section]
+Output: [Provide the correct output]
+
+Do not include any additional commentary, explanations, or more than two instances.

 Below are some examples:

camel/loaders/__init__.py
CHANGED

@@ -17,6 +17,7 @@ from .base_io import File, create_file, create_file_from_raw_bytes
 from .chunkr_reader import ChunkrReader
 from .firecrawl_reader import Firecrawl
 from .jina_url_reader import JinaURLReader
+from .panda_reader import PandaReader
 from .unstructured_io import UnstructuredIO

 __all__ = [
@@ -28,4 +29,5 @@ __all__ = [
     'Firecrawl',
     'Apify',
     'ChunkrReader',
+    'PandaReader',
 ]
camel/loaders/panda_reader.py
ADDED

@@ -0,0 +1,337 @@
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+import os
+from functools import wraps
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
+
+import pandas as pd
+
+if TYPE_CHECKING:
+    from pandas import DataFrame
+    from pandasai import SmartDataframe
+
+
+def check_suffix(valid_suffixs: List[str]) -> Callable:
+    r"""A decorator to check the file suffix of a given file path.
+
+    Args:
+        valid_suffix (str): The required file suffix.
+
+    Returns:
+        Callable: The decorator function.
+    """
+
+    def decorator(func: Callable):
+        @wraps(func)
+        def wrapper(
+            self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+        ) -> "DataFrame":
+            suffix = Path(file_path).suffix
+            if suffix not in valid_suffixs:
+                raise ValueError(
+                    f"Only {', '.join(valid_suffixs)} files are supported"
+                )
+            return func(self, file_path, *args, **kwargs)
+
+        return wrapper
+
+    return decorator
+
+
+class PandaReader:
+    def __init__(self, config: Optional[Dict[str, Any]] = None) -> None:
+        r"""Initializes the PandaReader class.
+
+        Args:
+            config (Optional[Dict[str, Any]], optional): The configuration
+                dictionary that can include LLM API settings for LLM-based
+                processing. If not provided, it will use OpenAI with the API
+                key from the OPENAI_API_KEY environment variable. You can
+                customize the LLM configuration by providing a 'llm' key in
+                the config dictionary. (default: :obj:`None`)
+        """
+        from pandasai.llm import OpenAI  # type: ignore[import-untyped]
+
+        self.config = config or {}
+        if "llm" not in self.config:
+            self.config["llm"] = OpenAI(
+                api_token=os.getenv("OPENAI_API_KEY"),
+            )
+
+        self.__LOADER = {
+            ".csv": self.read_csv,
+            ".xlsx": self.read_excel,
+            ".xls": self.read_excel,
+            ".json": self.read_json,
+            ".parquet": self.read_parquet,
+            ".sql": self.read_sql,
+            ".html": self.read_html,
+            ".feather": self.read_feather,
+            ".dta": self.read_stata,
+            ".sas": self.read_sas,
+            ".pkl": self.read_pickle,
+            ".h5": self.read_hdf,
+            ".orc": self.read_orc,
+        }
+
+    def load(
+        self,
+        data: Union["DataFrame", str],
+        *args: Any,
+        **kwargs: Dict[str, Any],
+    ) -> "SmartDataframe":
+        r"""Loads a file or DataFrame and returns a SmartDataframe object.
+
+        args:
+            data (Union[DataFrame, str]): The data to load.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            SmartDataframe: The SmartDataframe object.
+        """
+        from pandas import DataFrame
+        from pandasai import SmartDataframe
+
+        if isinstance(data, DataFrame):
+            return SmartDataframe(data, config=self.config)
+        file_path = str(data)
+        path = Path(file_path)
+        if not file_path.startswith("http") and not path.exists():
+            raise FileNotFoundError(f"File {file_path} not found")
+        if path.suffix in self.__LOADER:
+            return SmartDataframe(
+                self.__LOADER[path.suffix](file_path, *args, **kwargs),  # type: ignore[operator]
+                config=self.config,
+            )
+        else:
+            raise ValueError(f"Unsupported file format: {path.suffix}")
+
+    @check_suffix([".csv"])
+    def read_csv(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a CSV file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the CSV file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_csv(file_path, *args, **kwargs)
+
+    @check_suffix([".xlsx", ".xls"])
+    def read_excel(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads an Excel file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the Excel file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_excel(file_path, *args, **kwargs)
+
+    @check_suffix([".json"])
+    def read_json(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a JSON file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the JSON file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_json(file_path, *args, **kwargs)
+
+    @check_suffix([".parquet"])
+    def read_parquet(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a Parquet file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the Parquet file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_parquet(file_path, *args, **kwargs)
+
+    def read_sql(self, *args: Any, **kwargs: Dict[str, Any]) -> "DataFrame":
+        r"""Reads a SQL file and returns a DataFrame.
+
+        Args:
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_sql(*args, **kwargs)
+
+    def read_table(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a table and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the table.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_table(file_path, *args, **kwargs)
+
+    def read_clipboard(
+        self, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a clipboard and returns a DataFrame.
+
+        Args:
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_clipboard(*args, **kwargs)
+
+    @check_suffix([".html"])
+    def read_html(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads an HTML file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the HTML file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_html(file_path, *args, **kwargs)
+
+    @check_suffix([".feather"])
+    def read_feather(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a Feather file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the Feather file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_feather(file_path, *args, **kwargs)
+
+    @check_suffix([".dta"])
+    def read_stata(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a Stata file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the Stata file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_stata(file_path, *args, **kwargs)
+
+    @check_suffix([".sas"])
+    def read_sas(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a SAS file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the SAS file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_sas(file_path, *args, **kwargs)
+
+    @check_suffix([".pkl"])
+    def read_pickle(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads a Pickle file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the Pickle file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_pickle(file_path, *args, **kwargs)
+
+    @check_suffix([".h5"])
+    def read_hdf(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads an HDF file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the HDF file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_hdf(file_path, *args, **kwargs)
+
+    @check_suffix([".orc"])
+    def read_orc(
+        self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
+    ) -> "DataFrame":
+        r"""Reads an ORC file and returns a DataFrame.
+
+        Args:
+            file_path (str): The path to the ORC file.
+            *args (Any): Additional positional arguments.
+            **kwargs (Dict[str, Any]): Additional keyword arguments.
+
+        Returns:
+            DataFrame: The DataFrame object.
+        """
+        return pd.read_orc(file_path, *args, **kwargs)
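A short usage sketch of the reader added above: `load` dispatches on the file suffix and wraps the result in a pandasai `SmartDataframe`. The CSV path, the question, and the environment-variable handling are illustrative; `chat` is the pandasai SmartDataframe API rather than part of this diff.

```python
import os

from camel.loaders import PandaReader  # re-exported in loaders/__init__.py above

os.environ.setdefault("OPENAI_API_KEY", "sk-...")  # placeholder key

reader = PandaReader()         # falls back to the OpenAI LLM built from the env var
df = reader.load("sales.csv")  # suffix ".csv" dispatches to read_csv via the loader table
print(df.chat("Which product had the highest revenue?"))
```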
camel/messages/__init__.py
CHANGED

@@ -11,11 +11,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+from typing import Union
+
 from camel.types import (
     ChatCompletionAssistantMessageParam,
-    ChatCompletionFunctionMessageParam,
     ChatCompletionMessageParam,
     ChatCompletionSystemMessageParam,
+    ChatCompletionToolMessageParam,
     ChatCompletionUserMessageParam,
 )

@@ -32,9 +34,13 @@ from .conversion.sharegpt.function_call_formatter import (
 )

 OpenAISystemMessage = ChatCompletionSystemMessageParam
-OpenAIAssistantMessage =
+OpenAIAssistantMessage = Union[
+    ChatCompletionAssistantMessageParam,
+    ChatCompletionToolMessageParam,
+]
 OpenAIUserMessage = ChatCompletionUserMessageParam
-
+OpenAIToolMessageParam = ChatCompletionToolMessageParam
+
 OpenAIMessage = ChatCompletionMessageParam


@@ -45,7 +51,7 @@ __all__ = [
     'OpenAISystemMessage',
     'OpenAIAssistantMessage',
     'OpenAIUserMessage',
-    '
+    'OpenAIToolMessageParam',
     'OpenAIMessage',
     'FunctionCallFormatter',
     'HermesFunctionFormatter',
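For context, the new `OpenAIToolMessageParam` alias names OpenAI's "tool" message shape; a minimal sketch with illustrative values:

```python
from camel.messages import OpenAIToolMessageParam

# OpenAIToolMessageParam aliases ChatCompletionToolMessageParam, i.e. an
# OpenAI-style tool-result message. The values below are examples only.
tool_msg: OpenAIToolMessageParam = {
    "role": "tool",
    "content": '{"temperature_c": 21}',
    "tool_call_id": "call_abc123",
}
```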
camel/messages/conversion/conversation_models.py
CHANGED

@@ -69,6 +69,11 @@ class ShareGPTConversation(RootModel):
         for i in range(1, len(messages)):
             curr, prev = messages[i], messages[i - 1]

+            print("@@@@")
+            print(curr)
+            print(prev)
+            print("@@@@")
+
             if curr.from_ == "tool":
                 if prev.from_ != "gpt" or "<tool_call>" not in prev.value:
                     raise ValueError(
camel/messages/func_message.py
CHANGED

@@ -11,6 +11,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+import json
 from dataclasses import dataclass
 from typing import Any, Dict, Optional

@@ -18,8 +19,8 @@ from camel.messages import (
     BaseMessage,
     HermesFunctionFormatter,
     OpenAIAssistantMessage,
-    OpenAIFunctionMessage,
     OpenAIMessage,
+    OpenAIToolMessageParam,
 )
 from camel.messages.conversion import (
     ShareGPTMessage,
@@ -44,11 +45,14 @@ class FunctionCallingMessage(BaseMessage):
             function. (default: :obj:`None`)
         result (Optional[Any]): The result of function execution.
             (default: :obj:`None`)
+        tool_call_id (Optional[str]): The ID of the tool call, if available.
+            (default: :obj:`None`)
     """

     func_name: Optional[str] = None
     args: Optional[Dict] = None
     result: Optional[Any] = None
+    tool_call_id: Optional[str] = None

     def to_openai_message(
         self,
@@ -66,7 +70,7 @@ class FunctionCallingMessage(BaseMessage):
         if role_at_backend == OpenAIBackendRole.ASSISTANT:
             return self.to_openai_assistant_message()
         elif role_at_backend == OpenAIBackendRole.FUNCTION:
-            return self.
+            return self.to_openai_tool_message()
         else:
             raise ValueError(f"Unsupported role: {role_at_backend}.")

@@ -120,24 +124,29 @@ class FunctionCallingMessage(BaseMessage):
                 " due to missing function name or arguments."
             )

-
+        return {
             "role": "assistant",
-            "content": self.content,
-            "
-
-
+            "content": self.content or "",
+            "tool_calls": [
+                {
+                    "id": self.tool_call_id or "null",
+                    "type": "function",
+                    "function": {
+                        "name": self.func_name,
+                        "arguments": json.dumps(self.args),
+                    },
+                }
+            ],
         }

-
-
-
-        r"""Converts the message to an :obj:`OpenAIMessage` object
-        with the role being "function".
+    def to_openai_tool_message(self) -> OpenAIToolMessageParam:
+        r"""Converts the message to an :obj:`OpenAIToolMessageParam` object
+        with the role being "tool".

         Returns:
-
-            with its role being
+            OpenAIToolMessageParam: The converted
+                :obj:`OpenAIToolMessageParam` object with its role being
+                "tool".
         """
         if not self.func_name:
             raise ValueError(
@@ -145,11 +154,10 @@ class FunctionCallingMessage(BaseMessage):
                 " due to missing function name."
             )

-        result_content =
-        msg_dict: OpenAIFunctionMessage = {
-            "role": "function",
-            "name": self.func_name,
-            "content": f'{result_content}',
-        }
+        result_content = json.dumps(self.result)

-        return
+        return {
+            "role": "tool",
+            "content": result_content,
+            "tool_call_id": self.tool_call_id or "null",
+        }