camel-ai 0.2.76a12__py3-none-any.whl → 0.2.76a13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of camel-ai might be problematic. Click here for more details.
- camel/__init__.py +1 -1
- camel/agents/mcp_agent.py +30 -27
- camel/loaders/__init__.py +11 -2
- camel/parsers/__init__.py +18 -0
- camel/parsers/mcp_tool_call_parser.py +176 -0
- camel/toolkits/mcp_toolkit.py +2 -1
- {camel_ai-0.2.76a12.dist-info → camel_ai-0.2.76a13.dist-info}/METADATA +5 -9
- {camel_ai-0.2.76a12.dist-info → camel_ai-0.2.76a13.dist-info}/RECORD +10 -9
- camel/loaders/pandas_reader.py +0 -368
- {camel_ai-0.2.76a12.dist-info → camel_ai-0.2.76a13.dist-info}/WHEEL +0 -0
- {camel_ai-0.2.76a12.dist-info → camel_ai-0.2.76a13.dist-info}/licenses/LICENSE +0 -0
camel/__init__.py
CHANGED
camel/agents/mcp_agent.py
CHANGED
|
@@ -15,16 +15,25 @@
|
|
|
15
15
|
import asyncio
|
|
16
16
|
import json
|
|
17
17
|
import platform
|
|
18
|
-
import
|
|
19
|
-
|
|
18
|
+
from typing import (
|
|
19
|
+
TYPE_CHECKING,
|
|
20
|
+
Any,
|
|
21
|
+
Callable,
|
|
22
|
+
Dict,
|
|
23
|
+
List,
|
|
24
|
+
Optional,
|
|
25
|
+
Union,
|
|
26
|
+
cast,
|
|
27
|
+
)
|
|
20
28
|
|
|
21
|
-
from camel.agents import ChatAgent
|
|
29
|
+
from camel.agents.chat_agent import ChatAgent
|
|
22
30
|
from camel.logger import get_logger
|
|
23
31
|
from camel.messages import BaseMessage
|
|
24
|
-
from camel.models import BaseModelBackend
|
|
32
|
+
from camel.models.base_model import BaseModelBackend
|
|
33
|
+
from camel.models.model_factory import ModelFactory
|
|
25
34
|
from camel.prompts import TextPrompt
|
|
26
35
|
from camel.responses import ChatAgentResponse
|
|
27
|
-
from camel.toolkits import FunctionTool
|
|
36
|
+
from camel.toolkits.function_tool import FunctionTool
|
|
28
37
|
from camel.types import (
|
|
29
38
|
BaseMCPRegistryConfig,
|
|
30
39
|
MCPRegistryType,
|
|
@@ -33,6 +42,9 @@ from camel.types import (
|
|
|
33
42
|
RoleType,
|
|
34
43
|
)
|
|
35
44
|
|
|
45
|
+
if TYPE_CHECKING:
|
|
46
|
+
from camel.toolkits.mcp_toolkit import MCPToolkit
|
|
47
|
+
|
|
36
48
|
# AgentOps decorator setting
|
|
37
49
|
try:
|
|
38
50
|
import os
|
|
@@ -44,6 +56,8 @@ try:
|
|
|
44
56
|
except (ImportError, AttributeError):
|
|
45
57
|
from camel.utils import track_agent
|
|
46
58
|
|
|
59
|
+
from camel.parsers.mcp_tool_call_parser import extract_tool_calls_from_text
|
|
60
|
+
|
|
47
61
|
logger = get_logger(__name__)
|
|
48
62
|
|
|
49
63
|
|
|
@@ -168,8 +182,10 @@ class MCPAgent(ChatAgent):
|
|
|
168
182
|
**kwargs,
|
|
169
183
|
)
|
|
170
184
|
|
|
171
|
-
def _initialize_mcp_toolkit(self) -> MCPToolkit:
|
|
185
|
+
def _initialize_mcp_toolkit(self) -> "MCPToolkit":
|
|
172
186
|
r"""Initialize the MCP toolkit from the provided configuration."""
|
|
187
|
+
from camel.toolkits.mcp_toolkit import MCPToolkit
|
|
188
|
+
|
|
173
189
|
config_dict = {}
|
|
174
190
|
for registry_config in self.registry_configs:
|
|
175
191
|
config_dict.update(registry_config.get_config())
|
|
@@ -334,27 +350,14 @@ class MCPAgent(ChatAgent):
|
|
|
334
350
|
task = f"## Task:\n {input_message}"
|
|
335
351
|
input_message = str(self._text_tools) + task
|
|
336
352
|
response = await super().astep(input_message, *args, **kwargs)
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
end_match = re.search(r'```', content[json_start:])
|
|
347
|
-
if not end_match:
|
|
348
|
-
break
|
|
349
|
-
json_end = end_match.span()[0] + json_start
|
|
350
|
-
|
|
351
|
-
tool_json = content[json_start:json_end].strip('\n')
|
|
352
|
-
try:
|
|
353
|
-
tool_calls.append(json.loads(tool_json))
|
|
354
|
-
except json.JSONDecodeError:
|
|
355
|
-
logger.warning(f"Failed to parse JSON: {tool_json}")
|
|
356
|
-
continue
|
|
357
|
-
content = content[json_end:]
|
|
353
|
+
raw_content = response.msgs[0].content if response.msgs else ""
|
|
354
|
+
content = (
|
|
355
|
+
raw_content
|
|
356
|
+
if isinstance(raw_content, str)
|
|
357
|
+
else str(raw_content)
|
|
358
|
+
)
|
|
359
|
+
|
|
360
|
+
tool_calls = extract_tool_calls_from_text(content)
|
|
358
361
|
|
|
359
362
|
if not tool_calls:
|
|
360
363
|
return response
|
camel/loaders/__init__.py
CHANGED
|
@@ -21,7 +21,6 @@ from .jina_url_reader import JinaURLReader
|
|
|
21
21
|
from .markitdown import MarkItDownLoader
|
|
22
22
|
from .mineru_extractor import MinerU
|
|
23
23
|
from .mistral_reader import MistralReader
|
|
24
|
-
from .pandas_reader import PandasReader
|
|
25
24
|
from .scrapegraph_reader import ScrapeGraphAI
|
|
26
25
|
from .unstructured_io import UnstructuredIO
|
|
27
26
|
|
|
@@ -33,7 +32,6 @@ __all__ = [
|
|
|
33
32
|
'JinaURLReader',
|
|
34
33
|
'Firecrawl',
|
|
35
34
|
'Apify',
|
|
36
|
-
'PandasReader',
|
|
37
35
|
'ChunkrReader',
|
|
38
36
|
'ChunkrReaderConfig',
|
|
39
37
|
'MinerU',
|
|
@@ -42,3 +40,14 @@ __all__ = [
|
|
|
42
40
|
'ScrapeGraphAI',
|
|
43
41
|
'MistralReader',
|
|
44
42
|
]
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def __getattr__(name: str):
|
|
46
|
+
if name == 'PandasReader':
|
|
47
|
+
raise ImportError(
|
|
48
|
+
"PandasReader has been removed from camel.loaders. "
|
|
49
|
+
"The pandasai dependency limited pandas to version 1.5.3. "
|
|
50
|
+
"Please use ExcelToolkit from camel.toolkits instead for "
|
|
51
|
+
"handling structured data."
|
|
52
|
+
)
|
|
53
|
+
raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at
|
|
5
|
+
#
|
|
6
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
7
|
+
#
|
|
8
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
9
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
10
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
11
|
+
# See the License for the specific language governing permissions and
|
|
12
|
+
# limitations under the License.
|
|
13
|
+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
|
|
14
|
+
"""Helper parsers used across the CAMEL project."""
|
|
15
|
+
|
|
16
|
+
from .mcp_tool_call_parser import extract_tool_calls_from_text
|
|
17
|
+
|
|
18
|
+
__all__ = ["extract_tool_calls_from_text"]
|
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
|
|
2
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
+
# you may not use this file except in compliance with the License.
|
|
4
|
+
# You may obtain a copy of the License at
|
|
5
|
+
#
|
|
6
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
7
|
+
#
|
|
8
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
9
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
10
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
11
|
+
# See the License for the specific language governing permissions and
|
|
12
|
+
# limitations under the License.
|
|
13
|
+
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
|
|
14
|
+
"""Utility functions for parsing MCP tool calls from model output."""
|
|
15
|
+
|
|
16
|
+
import ast
|
|
17
|
+
import json
|
|
18
|
+
import logging
|
|
19
|
+
import re
|
|
20
|
+
from typing import Any, Dict, List, Optional
|
|
21
|
+
|
|
22
|
+
try: # pragma: no cover - optional dependency
|
|
23
|
+
import yaml
|
|
24
|
+
except ImportError: # pragma: no cover
|
|
25
|
+
yaml = None # type: ignore[assignment]
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
CODE_BLOCK_PATTERN = re.compile(
|
|
29
|
+
r"```(?:[a-z0-9_-]+)?\s*([\s\S]+?)\s*```",
|
|
30
|
+
re.IGNORECASE,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
JSON_START_PATTERN = re.compile(r"[{\[]")
|
|
34
|
+
JSON_TOKEN_PATTERN = re.compile(
|
|
35
|
+
r"""
|
|
36
|
+
(?P<double>"(?:\\.|[^"\\])*")
|
|
37
|
+
|
|
|
38
|
+
(?P<single>'(?:\\.|[^'\\])*')
|
|
39
|
+
|
|
|
40
|
+
(?P<brace>[{}\[\]])
|
|
41
|
+
""",
|
|
42
|
+
re.VERBOSE,
|
|
43
|
+
)
|
|
44
|
+
|
|
45
|
+
logger = logging.getLogger(__name__)
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def extract_tool_calls_from_text(content: str) -> List[Dict[str, Any]]:
|
|
49
|
+
"""Extract tool call dictionaries from raw text output."""
|
|
50
|
+
|
|
51
|
+
if not content:
|
|
52
|
+
return []
|
|
53
|
+
|
|
54
|
+
tool_calls: List[Dict[str, Any]] = []
|
|
55
|
+
seen_ranges: List[tuple[int, int]] = []
|
|
56
|
+
|
|
57
|
+
for match in CODE_BLOCK_PATTERN.finditer(content):
|
|
58
|
+
snippet = match.group(1).strip()
|
|
59
|
+
if not snippet:
|
|
60
|
+
continue
|
|
61
|
+
|
|
62
|
+
parsed = _try_parse_json_like(snippet)
|
|
63
|
+
if parsed is None:
|
|
64
|
+
logger.warning(
|
|
65
|
+
"Failed to parse JSON payload from fenced block: %s",
|
|
66
|
+
snippet,
|
|
67
|
+
)
|
|
68
|
+
continue
|
|
69
|
+
|
|
70
|
+
_collect_tool_calls(parsed, tool_calls)
|
|
71
|
+
seen_ranges.append((match.start(1), match.end(1)))
|
|
72
|
+
|
|
73
|
+
for start_match in JSON_START_PATTERN.finditer(content):
|
|
74
|
+
start_idx = start_match.start()
|
|
75
|
+
|
|
76
|
+
if any(start <= start_idx < stop for start, stop in seen_ranges):
|
|
77
|
+
continue
|
|
78
|
+
|
|
79
|
+
segment = _find_json_candidate(content, start_idx)
|
|
80
|
+
if segment is None:
|
|
81
|
+
continue
|
|
82
|
+
|
|
83
|
+
end_idx = start_idx + len(segment)
|
|
84
|
+
if any(start <= start_idx < stop for start, stop in seen_ranges):
|
|
85
|
+
continue
|
|
86
|
+
|
|
87
|
+
parsed = _try_parse_json_like(segment)
|
|
88
|
+
if parsed is None:
|
|
89
|
+
logger.debug(
|
|
90
|
+
"Unable to parse JSON-like candidate: %s",
|
|
91
|
+
_truncate_snippet(segment),
|
|
92
|
+
)
|
|
93
|
+
continue
|
|
94
|
+
|
|
95
|
+
_collect_tool_calls(parsed, tool_calls)
|
|
96
|
+
seen_ranges.append((start_idx, end_idx))
|
|
97
|
+
|
|
98
|
+
return tool_calls
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def _collect_tool_calls(
|
|
102
|
+
payload: Any, accumulator: List[Dict[str, Any]]
|
|
103
|
+
) -> None:
|
|
104
|
+
"""Collect valid tool call dictionaries from parsed payloads."""
|
|
105
|
+
|
|
106
|
+
if isinstance(payload, dict):
|
|
107
|
+
if payload.get("tool_name") is None:
|
|
108
|
+
return
|
|
109
|
+
accumulator.append(payload)
|
|
110
|
+
elif isinstance(payload, list):
|
|
111
|
+
for item in payload:
|
|
112
|
+
_collect_tool_calls(item, accumulator)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def _try_parse_json_like(snippet: str) -> Optional[Any]:
|
|
116
|
+
"""Parse a JSON or JSON-like snippet into Python data."""
|
|
117
|
+
|
|
118
|
+
try:
|
|
119
|
+
return json.loads(snippet)
|
|
120
|
+
except json.JSONDecodeError as exc:
|
|
121
|
+
logger.debug(
|
|
122
|
+
"json.loads failed: %s | snippet=%s",
|
|
123
|
+
exc,
|
|
124
|
+
_truncate_snippet(snippet),
|
|
125
|
+
)
|
|
126
|
+
|
|
127
|
+
if yaml is not None:
|
|
128
|
+
try:
|
|
129
|
+
return yaml.safe_load(snippet)
|
|
130
|
+
except yaml.YAMLError:
|
|
131
|
+
pass
|
|
132
|
+
|
|
133
|
+
try:
|
|
134
|
+
return ast.literal_eval(snippet)
|
|
135
|
+
except (ValueError, SyntaxError):
|
|
136
|
+
return None
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def _find_json_candidate(content: str, start_idx: int) -> Optional[str]:
|
|
140
|
+
"""Locate a balanced JSON-like segment starting at ``start_idx``."""
|
|
141
|
+
|
|
142
|
+
opening = content[start_idx]
|
|
143
|
+
if opening not in "{[":
|
|
144
|
+
return None
|
|
145
|
+
|
|
146
|
+
stack = ["}" if opening == "{" else "]"]
|
|
147
|
+
|
|
148
|
+
for token in JSON_TOKEN_PATTERN.finditer(content, start_idx + 1):
|
|
149
|
+
if token.lastgroup in {"double", "single"}:
|
|
150
|
+
continue
|
|
151
|
+
|
|
152
|
+
brace = token.group("brace")
|
|
153
|
+
if brace in "{[":
|
|
154
|
+
stack.append("}" if brace == "{" else "]")
|
|
155
|
+
continue
|
|
156
|
+
|
|
157
|
+
if not stack:
|
|
158
|
+
return None
|
|
159
|
+
|
|
160
|
+
expected = stack.pop()
|
|
161
|
+
if brace != expected:
|
|
162
|
+
return None
|
|
163
|
+
|
|
164
|
+
if not stack:
|
|
165
|
+
return content[start_idx : token.end()]
|
|
166
|
+
|
|
167
|
+
return None
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def _truncate_snippet(snippet: str, limit: int = 120) -> str:
|
|
171
|
+
"""Return a truncated representation suitable for logging."""
|
|
172
|
+
|
|
173
|
+
compact = " ".join(snippet.strip().split())
|
|
174
|
+
if len(compact) <= limit:
|
|
175
|
+
return compact
|
|
176
|
+
return f"{compact[: limit - 3]}..."
|
camel/toolkits/mcp_toolkit.py
CHANGED
|
@@ -21,7 +21,8 @@ from typing import Any, Dict, List, Optional
|
|
|
21
21
|
from typing_extensions import TypeGuard
|
|
22
22
|
|
|
23
23
|
from camel.logger import get_logger
|
|
24
|
-
from camel.toolkits import BaseToolkit
|
|
24
|
+
from camel.toolkits.base import BaseToolkit
|
|
25
|
+
from camel.toolkits.function_tool import FunctionTool
|
|
25
26
|
from camel.utils.commons import run_async
|
|
26
27
|
from camel.utils.mcp_client import MCPClient, create_mcp_client
|
|
27
28
|
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: camel-ai
|
|
3
|
-
Version: 0.2.
|
|
3
|
+
Version: 0.2.76a13
|
|
4
4
|
Summary: Communicative Agents for AI Society Study
|
|
5
5
|
Project-URL: Homepage, https://www.camel-ai.org/
|
|
6
6
|
Project-URL: Repository, https://github.com/camel-ai/camel
|
|
@@ -87,8 +87,7 @@ Requires-Dist: numpy<=2.2,>=1.2; extra == 'all'
|
|
|
87
87
|
Requires-Dist: onnxruntime<=1.19.2; extra == 'all'
|
|
88
88
|
Requires-Dist: openapi-spec-validator<0.8,>=0.7.1; extra == 'all'
|
|
89
89
|
Requires-Dist: openpyxl>=3.1.5; extra == 'all'
|
|
90
|
-
Requires-Dist: pandas
|
|
91
|
-
Requires-Dist: pandasai<3,>=2.3.0; extra == 'all'
|
|
90
|
+
Requires-Dist: pandas>=2; extra == 'all'
|
|
92
91
|
Requires-Dist: pgvector<0.3,>=0.2.4; extra == 'all'
|
|
93
92
|
Requires-Dist: playwright>=1.50.0; extra == 'all'
|
|
94
93
|
Requires-Dist: prance<24,>=23.6.21.0; extra == 'all'
|
|
@@ -166,7 +165,7 @@ Requires-Dist: datacommons<2,>=1.4.3; extra == 'data-tools'
|
|
|
166
165
|
Requires-Dist: math-verify<0.8,>=0.7.0; extra == 'data-tools'
|
|
167
166
|
Requires-Dist: networkx<4,>=3.4.2; extra == 'data-tools'
|
|
168
167
|
Requires-Dist: numpy<=2.2,>=1.2; extra == 'data-tools'
|
|
169
|
-
Requires-Dist: pandas
|
|
168
|
+
Requires-Dist: pandas>=2; extra == 'data-tools'
|
|
170
169
|
Requires-Dist: rouge<2,>=1.0.1; extra == 'data-tools'
|
|
171
170
|
Requires-Dist: stripe<12,>=11.3.0; extra == 'data-tools'
|
|
172
171
|
Requires-Dist: textblob<0.18,>=0.17.1; extra == 'data-tools'
|
|
@@ -220,7 +219,6 @@ Requires-Dist: numpy<=2.2,>=1.2; extra == 'document-tools'
|
|
|
220
219
|
Requires-Dist: onnxruntime<=1.19.2; extra == 'document-tools'
|
|
221
220
|
Requires-Dist: openapi-spec-validator<0.8,>=0.7.1; extra == 'document-tools'
|
|
222
221
|
Requires-Dist: openpyxl>=3.1.5; extra == 'document-tools'
|
|
223
|
-
Requires-Dist: pandasai<3,>=2.3.0; extra == 'document-tools'
|
|
224
222
|
Requires-Dist: prance<24,>=23.6.21.0; extra == 'document-tools'
|
|
225
223
|
Requires-Dist: pylatex>=1.4.2; extra == 'document-tools'
|
|
226
224
|
Requires-Dist: pymupdf<2,>=1.22.5; extra == 'document-tools'
|
|
@@ -245,7 +243,7 @@ Requires-Dist: mcp-simple-arxiv==0.2.2; extra == 'eigent'
|
|
|
245
243
|
Requires-Dist: numpy<=2.2,>=1.2; extra == 'eigent'
|
|
246
244
|
Requires-Dist: onnxruntime<=1.19.2; extra == 'eigent'
|
|
247
245
|
Requires-Dist: openpyxl>=3.1.5; extra == 'eigent'
|
|
248
|
-
Requires-Dist: pandas
|
|
246
|
+
Requires-Dist: pandas>=2; extra == 'eigent'
|
|
249
247
|
Requires-Dist: pydub<0.26,>=0.25.1; extra == 'eigent'
|
|
250
248
|
Requires-Dist: pylatex>=1.4.2; extra == 'eigent'
|
|
251
249
|
Requires-Dist: pytesseract>=0.3.13; extra == 'eigent'
|
|
@@ -305,8 +303,7 @@ Requires-Dist: numpy<=2.2,>=1.2; extra == 'owl'
|
|
|
305
303
|
Requires-Dist: onnxruntime<=1.19.2; extra == 'owl'
|
|
306
304
|
Requires-Dist: openapi-spec-validator<0.8,>=0.7.1; extra == 'owl'
|
|
307
305
|
Requires-Dist: openpyxl>=3.1.5; extra == 'owl'
|
|
308
|
-
Requires-Dist: pandas
|
|
309
|
-
Requires-Dist: pandasai<3,>=2.3.0; extra == 'owl'
|
|
306
|
+
Requires-Dist: pandas>=2; extra == 'owl'
|
|
310
307
|
Requires-Dist: playwright>=1.50.0; extra == 'owl'
|
|
311
308
|
Requires-Dist: prance<24,>=23.6.21.0; extra == 'owl'
|
|
312
309
|
Requires-Dist: pyautogui<0.10,>=0.9.54; extra == 'owl'
|
|
@@ -343,7 +340,6 @@ Requires-Dist: google-genai>=1.13.0; extra == 'rag'
|
|
|
343
340
|
Requires-Dist: nebula3-python==3.8.2; extra == 'rag'
|
|
344
341
|
Requires-Dist: neo4j<6,>=5.18.0; extra == 'rag'
|
|
345
342
|
Requires-Dist: numpy<=2.2,>=1.2; extra == 'rag'
|
|
346
|
-
Requires-Dist: pandasai<3,>=2.3.0; extra == 'rag'
|
|
347
343
|
Requires-Dist: protobuf>=6.0.0; extra == 'rag'
|
|
348
344
|
Requires-Dist: pymilvus<3,>=2.4.0; extra == 'rag'
|
|
349
345
|
Requires-Dist: pyobvector>=0.1.18; extra == 'rag'
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
camel/__init__.py,sha256=
|
|
1
|
+
camel/__init__.py,sha256=VhpEyVTNYd6JwdWL7o_GvaCbZHyDg0eJ_rNe8j0ncyQ,902
|
|
2
2
|
camel/generators.py,sha256=JRqj9_m1PF4qT6UtybzTQ-KBT9MJQt18OAAYvQ_fr2o,13844
|
|
3
3
|
camel/human.py,sha256=Xg8x1cS5KK4bQ1SDByiHZnzsRpvRP-KZViNvmu38xo4,5475
|
|
4
4
|
camel/logger.py,sha256=WgEwael_eT6D-lVAKHpKIpwXSTjvLbny5jbV1Ab8lnA,5760
|
|
@@ -12,7 +12,7 @@ camel/agents/critic_agent.py,sha256=L6cTbYjyZB0DCa51tQ6LZLA6my8kHLC4nktHySH78H4,
|
|
|
12
12
|
camel/agents/deductive_reasoner_agent.py,sha256=6BZGaq1hR6hKJuQtOfoYQnk_AkZpw_Mr7mUy2MspQgs,13540
|
|
13
13
|
camel/agents/embodied_agent.py,sha256=XBxBu5ZMmSJ4B2U3Z7SMwvLlgp6yNpaBe8HNQmY9CZA,7536
|
|
14
14
|
camel/agents/knowledge_graph_agent.py,sha256=7Tchhyvm1s8tQ3at7iGKZt70xWZllRXu2vwUFR37p10,9681
|
|
15
|
-
camel/agents/mcp_agent.py,sha256=
|
|
15
|
+
camel/agents/mcp_agent.py,sha256=WAs6JDO2hxTrB_bMvTB-dpOlu2b2BpciUbbBPGJp8Sg,16234
|
|
16
16
|
camel/agents/multi_hop_generator_agent.py,sha256=aYsZNsEFHxIq8_wDN8lZRkvRbfhlOYGBKezWr87y8Bs,4325
|
|
17
17
|
camel/agents/programmed_agent_instruction.py,sha256=99fLe41che3X6wPpNPJXRwl4If6EoQqQVWIoT3DKE1s,7124
|
|
18
18
|
camel/agents/repo_agent.py,sha256=VGylY9GHAVba0gYZsa9EayejPg0Te_B9ZLpY6GdxX0o,22285
|
|
@@ -143,7 +143,7 @@ camel/interpreters/ipython_interpreter.py,sha256=V-Z_nIwaKmmivv_gD6nwdzmqfBUW4Io
|
|
|
143
143
|
camel/interpreters/microsandbox_interpreter.py,sha256=sHh787YawKahoHpNUnujIVIreSSEuerF7TVxrbhDz_Y,14179
|
|
144
144
|
camel/interpreters/subprocess_interpreter.py,sha256=DMLJIlTiHk0QA_pH5CXESHdJk-5olKo3aUh0KNECqJI,17759
|
|
145
145
|
camel/interpreters/docker/Dockerfile,sha256=7LGYJrf4Okpqg9JSEXoWMCVN-0QUG89Y49BTQl7rjvA,1406
|
|
146
|
-
camel/loaders/__init__.py,sha256=
|
|
146
|
+
camel/loaders/__init__.py,sha256=L-1GQN69dqpe1FfdKGV2XRBPtpaUcztU6cMUeeJO9a8,1909
|
|
147
147
|
camel/loaders/apify_reader.py,sha256=oaVjKyNhJhG-hTuIwrpZ2hsB4XTL0M-kUksgSL2R0ck,7952
|
|
148
148
|
camel/loaders/base_io.py,sha256=zsbdBPHgSPFyQrtiUgAsHvy39QHWUObRYNaVvr-pPk0,10190
|
|
149
149
|
camel/loaders/base_loader.py,sha256=sZd-fHDNav1U0b2C3IKl4nX42OzrqRioCgRLajb6VdE,3080
|
|
@@ -154,7 +154,6 @@ camel/loaders/jina_url_reader.py,sha256=dL9J5JlsFKEhi4gU_vYIxKvvf_RJ4LY9gG6i8P8J
|
|
|
154
154
|
camel/loaders/markitdown.py,sha256=2tc9Tf2pMJFkFzJwTAge-2kdmgRBBAV3sXHjcbo9AVE,7058
|
|
155
155
|
camel/loaders/mineru_extractor.py,sha256=nKa5n7f3ergv1TURcbXZJP5mQpnSCIFdlWrxWn4hBz8,9070
|
|
156
156
|
camel/loaders/mistral_reader.py,sha256=N9y6mqxPri9w4wE3nfQ2Ch4ph9oxeMR5yJo2oOcQl-Y,5426
|
|
157
|
-
camel/loaders/pandas_reader.py,sha256=zTVrUWsnR6oeOqeL8KLlnapJeaB4El0wyIrEPY1aLus,11922
|
|
158
157
|
camel/loaders/scrapegraph_reader.py,sha256=k8EOV5-p41DHDr2ITV8BR1sMqBsvN41CN8Byj2cf5kY,3120
|
|
159
158
|
camel/loaders/unstructured_io.py,sha256=wA3fkDeS4mSPFkMDnWZzJYKDltio7l72BU9D3EGfoUs,17423
|
|
160
159
|
camel/memories/__init__.py,sha256=vaStmUZNcHI884Fg9IQTezd50x4Me5fhDej0uK0O5fk,1404
|
|
@@ -229,6 +228,8 @@ camel/models/reward/base_reward_model.py,sha256=erCmBCl51oFNjEHCXWxdHKIPNVJnQlNG
|
|
|
229
228
|
camel/models/reward/evaluator.py,sha256=54ev5MuQ_5Tp0-LGO59EIuIkGrVMbtXXqpBR5Ps9kCM,2426
|
|
230
229
|
camel/models/reward/nemotron_model.py,sha256=EICDP2SyQpARupxsGWFKJHNADsVknT_t6tCG5R8AEPA,3916
|
|
231
230
|
camel/models/reward/skywork_model.py,sha256=KxHVDuwja2kZBCoCX8_sLBbXUGRSFsTPjgwsy1uyoyU,3246
|
|
231
|
+
camel/parsers/__init__.py,sha256=8f2k4MgjY73DeUQVVN1CaKqrIkOdexJqfJ4xNuAvbYM,862
|
|
232
|
+
camel/parsers/mcp_tool_call_parser.py,sha256=PsWBSJPBTmG9rgOXApdjrHA8P88e0gL0vLXTmvzr_Uw,4974
|
|
232
233
|
camel/personas/__init__.py,sha256=b2EU-unbIfCZjgq2dODf9mq0L9-VP5QXDVTjeYyvKCM,804
|
|
233
234
|
camel/personas/persona.py,sha256=PCu_hCXM3AqOc4J0HT8F6_WT_GKj8ahWM1Aw1lxL0q4,4253
|
|
234
235
|
camel/personas/persona_hub.py,sha256=v5O3BE_krz6s1OOiRhhnODuCf-2cqIw1tDCWgGG55dU,10555
|
|
@@ -357,7 +358,7 @@ camel/toolkits/klavis_toolkit.py,sha256=ZKerhgz5e-AV-iv0ftf07HgWikknIHjB3EOQswfu
|
|
|
357
358
|
camel/toolkits/linkedin_toolkit.py,sha256=wn4eXwYYlVA7doTna7k7WYhUqTBF83W79S-UJs_IQr0,8065
|
|
358
359
|
camel/toolkits/markitdown_toolkit.py,sha256=lwN6qQY8TLZkNWOqzeKZG3Fku-HMpGFrdRwhtPaJSlw,3844
|
|
359
360
|
camel/toolkits/math_toolkit.py,sha256=SJbzT6akHRlmqo1QwCj1f7-6pEv0sNKJbcYvYAylHQw,5439
|
|
360
|
-
camel/toolkits/mcp_toolkit.py,sha256=
|
|
361
|
+
camel/toolkits/mcp_toolkit.py,sha256=jRzxKhuZFT2S1HYIkJtGeTu1-veoQYvi7gdwDMSzVKE,37575
|
|
361
362
|
camel/toolkits/memory_toolkit.py,sha256=TeKYd5UMwgjVpuS2orb-ocFL13eUNKujvrFOruDCpm8,4436
|
|
362
363
|
camel/toolkits/meshy_toolkit.py,sha256=NbgdOBD3FYLtZf-AfonIv6-Q8-8DW129jsaP1PqI2rs,7126
|
|
363
364
|
camel/toolkits/message_agent_toolkit.py,sha256=yWvAaxoxAvDEtD7NH7IkkHIyfWIYK47WZhn5E_RaxKo,22661
|
|
@@ -486,7 +487,7 @@ camel/verifiers/math_verifier.py,sha256=tA1D4S0sm8nsWISevxSN0hvSVtIUpqmJhzqfbuMo
|
|
|
486
487
|
camel/verifiers/models.py,sha256=GdxYPr7UxNrR1577yW4kyroRcLGfd-H1GXgv8potDWU,2471
|
|
487
488
|
camel/verifiers/physics_verifier.py,sha256=c1grrRddcrVN7szkxhv2QirwY9viIRSITWeWFF5HmLs,30187
|
|
488
489
|
camel/verifiers/python_verifier.py,sha256=ogTz77wODfEcDN4tMVtiSkRQyoiZbHPY2fKybn59lHw,20558
|
|
489
|
-
camel_ai-0.2.
|
|
490
|
-
camel_ai-0.2.
|
|
491
|
-
camel_ai-0.2.
|
|
492
|
-
camel_ai-0.2.
|
|
490
|
+
camel_ai-0.2.76a13.dist-info/METADATA,sha256=TbWu2lS1Q7QSGlX46uMBPCL084szWP0vL8ql_z6ClTE,55165
|
|
491
|
+
camel_ai-0.2.76a13.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
|
492
|
+
camel_ai-0.2.76a13.dist-info/licenses/LICENSE,sha256=id0nB2my5kG0xXeimIu5zZrbHLS6EQvxvkKkzIHaT2k,11343
|
|
493
|
+
camel_ai-0.2.76a13.dist-info/RECORD,,
|
camel/loaders/pandas_reader.py
DELETED
|
@@ -1,368 +0,0 @@
|
|
|
1
|
-
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
|
|
2
|
-
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
3
|
-
# you may not use this file except in compliance with the License.
|
|
4
|
-
# You may obtain a copy of the License at
|
|
5
|
-
#
|
|
6
|
-
# http://www.apache.org/licenses/LICENSE-2.0
|
|
7
|
-
#
|
|
8
|
-
# Unless required by applicable law or agreed to in writing, software
|
|
9
|
-
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
10
|
-
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
11
|
-
# See the License for the specific language governing permissions and
|
|
12
|
-
# limitations under the License.
|
|
13
|
-
# ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
|
|
14
|
-
from functools import wraps
|
|
15
|
-
from pathlib import Path
|
|
16
|
-
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Union
|
|
17
|
-
|
|
18
|
-
if TYPE_CHECKING:
|
|
19
|
-
from pandas import DataFrame
|
|
20
|
-
from pandasai import SmartDataframe
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
def check_suffix(valid_suffixs: List[str]) -> Callable:
|
|
24
|
-
r"""A decorator to check the file suffix of a given file path.
|
|
25
|
-
|
|
26
|
-
Args:
|
|
27
|
-
valid_suffix (str): The required file suffix.
|
|
28
|
-
|
|
29
|
-
Returns:
|
|
30
|
-
Callable: The decorator function.
|
|
31
|
-
"""
|
|
32
|
-
|
|
33
|
-
def decorator(func: Callable):
|
|
34
|
-
@wraps(func)
|
|
35
|
-
def wrapper(
|
|
36
|
-
self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
|
|
37
|
-
) -> "DataFrame":
|
|
38
|
-
suffix = Path(file_path).suffix
|
|
39
|
-
if suffix not in valid_suffixs:
|
|
40
|
-
raise ValueError(
|
|
41
|
-
f"Only {', '.join(valid_suffixs)} files are supported"
|
|
42
|
-
)
|
|
43
|
-
return func(self, file_path, *args, **kwargs)
|
|
44
|
-
|
|
45
|
-
return wrapper
|
|
46
|
-
|
|
47
|
-
return decorator
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
class PandasReader:
|
|
51
|
-
def __init__(self, config: Optional[Dict[str, Any]] = None) -> None:
|
|
52
|
-
r"""Initializes the PandasReader class.
|
|
53
|
-
|
|
54
|
-
Args:
|
|
55
|
-
config (Optional[Dict[str, Any]], optional): The configuration
|
|
56
|
-
dictionary that can include LLM API settings for LLM-based
|
|
57
|
-
processing. If not provided, no LLM will be configured by
|
|
58
|
-
default. You can customize the LLM configuration by providing
|
|
59
|
-
a 'llm' key in the config dictionary. (default: :obj:`None`)
|
|
60
|
-
"""
|
|
61
|
-
self.config = config or {}
|
|
62
|
-
|
|
63
|
-
self.__LOADER = {
|
|
64
|
-
".csv": self.read_csv,
|
|
65
|
-
".xlsx": self.read_excel,
|
|
66
|
-
".xls": self.read_excel,
|
|
67
|
-
".json": self.read_json,
|
|
68
|
-
".parquet": self.read_parquet,
|
|
69
|
-
".sql": self.read_sql,
|
|
70
|
-
".html": self.read_html,
|
|
71
|
-
".feather": self.read_feather,
|
|
72
|
-
".dta": self.read_stata,
|
|
73
|
-
".sas": self.read_sas,
|
|
74
|
-
".pkl": self.read_pickle,
|
|
75
|
-
".h5": self.read_hdf,
|
|
76
|
-
".orc": self.read_orc,
|
|
77
|
-
}
|
|
78
|
-
|
|
79
|
-
def load(
|
|
80
|
-
self,
|
|
81
|
-
data: Union["DataFrame", str],
|
|
82
|
-
*args: Any,
|
|
83
|
-
**kwargs: Dict[str, Any],
|
|
84
|
-
) -> Union["DataFrame", "SmartDataframe"]:
|
|
85
|
-
r"""Loads a file or DataFrame and returns a DataFrame or
|
|
86
|
-
SmartDataframe object.
|
|
87
|
-
|
|
88
|
-
If an LLM is configured in the config dictionary, a SmartDataframe
|
|
89
|
-
will be returned, otherwise a regular pandas DataFrame will be
|
|
90
|
-
returned.
|
|
91
|
-
|
|
92
|
-
args:
|
|
93
|
-
data (Union[DataFrame, str]): The data to load.
|
|
94
|
-
*args (Any): Additional positional arguments.
|
|
95
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
96
|
-
|
|
97
|
-
Returns:
|
|
98
|
-
Union[DataFrame, SmartDataframe]: The DataFrame or SmartDataframe
|
|
99
|
-
object.
|
|
100
|
-
"""
|
|
101
|
-
from pandas import DataFrame
|
|
102
|
-
|
|
103
|
-
# Load the data into a pandas DataFrame
|
|
104
|
-
if isinstance(data, DataFrame):
|
|
105
|
-
df = data
|
|
106
|
-
else:
|
|
107
|
-
file_path = str(data)
|
|
108
|
-
path = Path(file_path)
|
|
109
|
-
if not file_path.startswith("http") and not path.exists():
|
|
110
|
-
raise FileNotFoundError(f"File {file_path} not found")
|
|
111
|
-
if path.suffix in self.__LOADER:
|
|
112
|
-
df = self.__LOADER[path.suffix](file_path, *args, **kwargs) # type: ignore[operator]
|
|
113
|
-
else:
|
|
114
|
-
raise ValueError(f"Unsupported file format: {path.suffix}")
|
|
115
|
-
|
|
116
|
-
# If an LLM is configured, return a SmartDataframe, otherwise return a
|
|
117
|
-
# regular DataFrame
|
|
118
|
-
if "llm" in self.config:
|
|
119
|
-
from pandasai import SmartDataframe
|
|
120
|
-
|
|
121
|
-
return SmartDataframe(df, config=self.config)
|
|
122
|
-
else:
|
|
123
|
-
return df
|
|
124
|
-
|
|
125
|
-
@check_suffix([".csv"])
|
|
126
|
-
def read_csv(
|
|
127
|
-
self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
|
|
128
|
-
) -> "DataFrame":
|
|
129
|
-
r"""Reads a CSV file and returns a DataFrame.
|
|
130
|
-
|
|
131
|
-
Args:
|
|
132
|
-
file_path (str): The path to the CSV file.
|
|
133
|
-
*args (Any): Additional positional arguments.
|
|
134
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
135
|
-
|
|
136
|
-
Returns:
|
|
137
|
-
DataFrame: The DataFrame object.
|
|
138
|
-
"""
|
|
139
|
-
import pandas as pd
|
|
140
|
-
|
|
141
|
-
return pd.read_csv(file_path, *args, **kwargs)
|
|
142
|
-
|
|
143
|
-
@check_suffix([".xlsx", ".xls"])
|
|
144
|
-
def read_excel(
|
|
145
|
-
self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
|
|
146
|
-
) -> "DataFrame":
|
|
147
|
-
r"""Reads an Excel file and returns a DataFrame.
|
|
148
|
-
|
|
149
|
-
Args:
|
|
150
|
-
file_path (str): The path to the Excel file.
|
|
151
|
-
*args (Any): Additional positional arguments.
|
|
152
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
153
|
-
|
|
154
|
-
Returns:
|
|
155
|
-
DataFrame: The DataFrame object.
|
|
156
|
-
"""
|
|
157
|
-
import pandas as pd
|
|
158
|
-
|
|
159
|
-
return pd.read_excel(file_path, *args, **kwargs)
|
|
160
|
-
|
|
161
|
-
@check_suffix([".json"])
|
|
162
|
-
def read_json(
|
|
163
|
-
self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
|
|
164
|
-
) -> "DataFrame":
|
|
165
|
-
r"""Reads a JSON file and returns a DataFrame.
|
|
166
|
-
|
|
167
|
-
Args:
|
|
168
|
-
file_path (str): The path to the JSON file.
|
|
169
|
-
*args (Any): Additional positional arguments.
|
|
170
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
171
|
-
|
|
172
|
-
Returns:
|
|
173
|
-
DataFrame: The DataFrame object.
|
|
174
|
-
"""
|
|
175
|
-
import pandas as pd
|
|
176
|
-
|
|
177
|
-
return pd.read_json(file_path, *args, **kwargs)
|
|
178
|
-
|
|
179
|
-
@check_suffix([".parquet"])
|
|
180
|
-
def read_parquet(
|
|
181
|
-
self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
|
|
182
|
-
) -> "DataFrame":
|
|
183
|
-
r"""Reads a Parquet file and returns a DataFrame.
|
|
184
|
-
|
|
185
|
-
Args:
|
|
186
|
-
file_path (str): The path to the Parquet file.
|
|
187
|
-
*args (Any): Additional positional arguments.
|
|
188
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
189
|
-
|
|
190
|
-
Returns:
|
|
191
|
-
DataFrame: The DataFrame object.
|
|
192
|
-
"""
|
|
193
|
-
import pandas as pd
|
|
194
|
-
|
|
195
|
-
return pd.read_parquet(file_path, *args, **kwargs)
|
|
196
|
-
|
|
197
|
-
def read_sql(self, *args: Any, **kwargs: Dict[str, Any]) -> "DataFrame":
|
|
198
|
-
r"""Reads a SQL file and returns a DataFrame.
|
|
199
|
-
|
|
200
|
-
Args:
|
|
201
|
-
*args (Any): Additional positional arguments.
|
|
202
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
203
|
-
|
|
204
|
-
Returns:
|
|
205
|
-
DataFrame: The DataFrame object.
|
|
206
|
-
"""
|
|
207
|
-
import pandas as pd
|
|
208
|
-
|
|
209
|
-
return pd.read_sql(*args, **kwargs)
|
|
210
|
-
|
|
211
|
-
def read_table(
|
|
212
|
-
self, file_path: str, *args: Any, **kwargs: Dict[str, Any]
|
|
213
|
-
) -> "DataFrame":
|
|
214
|
-
r"""Reads a table and returns a DataFrame.
|
|
215
|
-
|
|
216
|
-
Args:
|
|
217
|
-
file_path (str): The path to the table.
|
|
218
|
-
*args (Any): Additional positional arguments.
|
|
219
|
-
**kwargs (Dict[str, Any]): Additional keyword arguments.
|
|
220
|
-
|
|
221
|
-
Returns:
|
|
222
|
-
DataFrame: The DataFrame object.
|
|
223
|
-
"""
|
|
224
|
-
import pandas as pd
|
|
225
|
-
|
|
226
|
-
return pd.read_table(file_path, *args, **kwargs)
|
|
227
|
-
|
|
228
|
-
def read_clipboard(
    self, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads text from the system clipboard and returns a DataFrame.

    Args:
        *args (Any): Positional arguments forwarded to
            ``pandas.read_clipboard``.
        **kwargs (Any): Keyword arguments forwarded to
            ``pandas.read_clipboard``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_clipboard(*args, **kwargs)
|
|
243
|
-
|
|
244
|
-
@check_suffix([".html"])
def read_html(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads an HTML file and returns a DataFrame.

    Args:
        file_path (str): The path to the HTML file; must end with ``.html``
            (enforced by the ``check_suffix`` decorator).
        *args (Any): Positional arguments forwarded to ``pandas.read_html``.
        **kwargs (Any): Keyword arguments forwarded to ``pandas.read_html``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_html(file_path, *args, **kwargs)
|
|
261
|
-
|
|
262
|
-
@check_suffix([".feather"])
def read_feather(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads a Feather file and returns a DataFrame.

    Args:
        file_path (str): The path to the Feather file; must end with
            ``.feather`` (enforced by the ``check_suffix`` decorator).
        *args (Any): Positional arguments forwarded to
            ``pandas.read_feather``.
        **kwargs (Any): Keyword arguments forwarded to
            ``pandas.read_feather``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_feather(file_path, *args, **kwargs)
|
|
279
|
-
|
|
280
|
-
@check_suffix([".dta"])
def read_stata(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads a Stata file and returns a DataFrame.

    Args:
        file_path (str): The path to the Stata file; must end with ``.dta``
            (enforced by the ``check_suffix`` decorator).
        *args (Any): Positional arguments forwarded to ``pandas.read_stata``.
        **kwargs (Any): Keyword arguments forwarded to ``pandas.read_stata``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_stata(file_path, *args, **kwargs)
|
|
297
|
-
|
|
298
|
-
@check_suffix([".sas"])
def read_sas(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads a SAS file and returns a DataFrame.

    Args:
        file_path (str): The path to the SAS file; must end with ``.sas``
            (enforced by the ``check_suffix`` decorator).
            NOTE(review): pandas SAS datasets commonly use ``.sas7bdat`` or
            ``.xpt`` extensions — confirm the suffix list is intentional.
        *args (Any): Positional arguments forwarded to ``pandas.read_sas``.
        **kwargs (Any): Keyword arguments forwarded to ``pandas.read_sas``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_sas(file_path, *args, **kwargs)
|
|
315
|
-
|
|
316
|
-
@check_suffix([".pkl"])
def read_pickle(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads a Pickle file and returns a DataFrame.

    SECURITY: unpickling executes arbitrary code from the file. Only call
    this on pickle files from a trusted source — never on user-supplied
    or downloaded data.

    Args:
        file_path (str): The path to the Pickle file; must end with ``.pkl``
            (enforced by the ``check_suffix`` decorator).
        *args (Any): Positional arguments forwarded to ``pandas.read_pickle``.
        **kwargs (Any): Keyword arguments forwarded to
            ``pandas.read_pickle``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_pickle(file_path, *args, **kwargs)
|
|
333
|
-
|
|
334
|
-
@check_suffix([".h5"])
def read_hdf(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads an HDF file and returns a DataFrame.

    Args:
        file_path (str): The path to the HDF file; must end with ``.h5``
            (enforced by the ``check_suffix`` decorator).
        *args (Any): Positional arguments forwarded to ``pandas.read_hdf``.
        **kwargs (Any): Keyword arguments forwarded to ``pandas.read_hdf``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_hdf(file_path, *args, **kwargs)
|
|
351
|
-
|
|
352
|
-
@check_suffix([".orc"])
def read_orc(
    self, file_path: str, *args: Any, **kwargs: Any
) -> "DataFrame":
    r"""Reads an ORC file and returns a DataFrame.

    Args:
        file_path (str): The path to the ORC file; must end with ``.orc``
            (enforced by the ``check_suffix`` decorator).
        *args (Any): Positional arguments forwarded to ``pandas.read_orc``.
        **kwargs (Any): Keyword arguments forwarded to ``pandas.read_orc``.

    Returns:
        DataFrame: The DataFrame object.
    """
    # Imported lazily so pandas is only required when a reader is used.
    import pandas as pd

    return pd.read_orc(file_path, *args, **kwargs)
|
|
File without changes
|
|
File without changes
|