erdo 0.1.4__py3-none-any.whl → 0.1.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of erdo might be problematic. Click here for more details.
- erdo/__init__.py +13 -13
- erdo/_generated/actions/__init__.py +3 -1
- erdo/_generated/actions/analysis.py +116 -4
- erdo/_generated/actions/bot.py +61 -5
- erdo/_generated/actions/codeexec.py +53 -28
- erdo/_generated/actions/llm.py +44 -5
- erdo/_generated/actions/memory.py +252 -57
- erdo/_generated/actions/pdfextractor.py +97 -0
- erdo/_generated/actions/resource_definitions.py +114 -12
- erdo/_generated/actions/sqlexec.py +86 -0
- erdo/_generated/actions/utils.py +178 -56
- erdo/_generated/actions/webparser.py +15 -5
- erdo/_generated/actions/websearch.py +15 -5
- erdo/_generated/condition/__init__.py +137 -127
- erdo/_generated/internal_actions.py +14 -2
- erdo/_generated/types.py +92 -48
- erdo/actions/__init__.py +8 -8
- erdo/bot_permissions.py +266 -0
- erdo/config/__init__.py +5 -0
- erdo/config/config.py +140 -0
- erdo/invoke/__init__.py +10 -0
- erdo/invoke/client.py +213 -0
- erdo/invoke/invoke.py +244 -0
- erdo/sync/__init__.py +17 -0
- erdo/sync/client.py +95 -0
- erdo/sync/extractor.py +482 -0
- erdo/sync/sync.py +327 -0
- erdo/types.py +516 -18
- {erdo-0.1.4.dist-info → erdo-0.1.6.dist-info}/METADATA +4 -1
- erdo-0.1.6.dist-info/RECORD +45 -0
- erdo-0.1.4.dist-info/RECORD +0 -33
- {erdo-0.1.4.dist-info → erdo-0.1.6.dist-info}/WHEEL +0 -0
- {erdo-0.1.4.dist-info → erdo-0.1.6.dist-info}/entry_points.txt +0 -0
- {erdo-0.1.4.dist-info → erdo-0.1.6.dist-info}/licenses/LICENSE +0 -0
erdo/sync/extractor.py
ADDED
|
@@ -0,0 +1,482 @@
|
|
|
1
|
+
"""Runtime extraction for syncing agents to the backend."""
|
|
2
|
+
|
|
3
|
+
import ast
|
|
4
|
+
import importlib.util
|
|
5
|
+
import inspect
|
|
6
|
+
import json
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
import textwrap
|
|
10
|
+
import warnings
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import Any, Dict, List, Optional, Union
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class TemplateStringEncoder(json.JSONEncoder):
    """Custom JSON encoder that handles TemplateString objects and enums.

    Serialization precedence for otherwise non-serializable objects:
      1. objects exposing a callable ``to_dict()`` -> their dict form
      2. ``TemplateString`` instances -> their template text
      3. enum members -> their ``value`` (falling back to ``str``)
      4. state-reference proxies (objects with ``_parent_path``) -> ``str(obj)``
      5. everything else -> ``json.JSONEncoder.default`` (raises TypeError)
    """

    def default(self, obj):
        # 1. Anything that knows how to serialize itself.
        if hasattr(obj, "to_dict") and callable(getattr(obj, "to_dict")):
            return obj.to_dict()

        # 2. TemplateString, matched by class name to avoid a hard import.
        if str(type(obj).__name__) == "TemplateString":
            if hasattr(obj, "to_template_string") and callable(
                getattr(obj, "to_template_string")
            ):
                return obj.to_template_string()
            if hasattr(obj, "template"):
                return obj.template
            return str(obj)

        # 3. Enum members, matched by base-class name to avoid a hard import.
        # BUGFIX: this used to be an `elif hasattr(obj, "__class__") ...`
        # branch that matched *every* object (all classes have an MRO),
        # which made the `_parent_path` check below unreachable. It is now
        # a non-blocking scan so non-enum objects fall through.
        for base in type(obj).__mro__:
            if "Enum" in str(base):
                return obj.value if hasattr(obj, "value") else str(obj)

        # 4. State-reference proxies render as their template string.
        if hasattr(obj, "_parent_path"):
            return str(obj)

        return super().default(obj)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def transform_string_value(value: str) -> str:
    """Transform a string value by converting state.field to {{.Data.field}} templates."""
    # Non-string inputs pass through untouched.
    if not isinstance(value, str):
        return value

    import re

    # Matches `state.` followed by a dotted identifier path, e.g. state.a.b_c
    state_ref = re.compile(r"\bstate\.([a-zA-Z_][a-zA-Z0-9_.]*)")
    return state_ref.sub(lambda m: "{{.Data." + m.group(1) + "}}", value)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def transform_dict_recursively(obj: Any) -> Any:
    """Recursively transform a dictionary/object, converting state references to templates."""
    # State-reference proxies collapse to a {{.Data.<path>}} template string.
    is_state_proxy = hasattr(obj, "_parent_path") and hasattr(obj, "_tracker")
    if is_state_proxy:
        return "{{.Data." + obj._parent_path + "}}"

    # PythonFile markers serialize through their own to_dict().
    looks_like_python_file = (
        hasattr(obj, "to_dict")
        and hasattr(obj, "filename")
        and "PythonFile" in str(type(obj))
    )
    if looks_like_python_file:
        return obj.to_dict()

    if isinstance(obj, dict):
        return {key: transform_dict_recursively(val) for key, val in obj.items()}
    if isinstance(obj, list):
        return [transform_dict_recursively(elem) for elem in obj]
    if isinstance(obj, str):
        return transform_string_value(obj)
    return obj
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def convert_step_dict_to_step_with_handlers(step_dict: Dict) -> Dict:
    """Convert a step dictionary to StepWithHandlers format recursively."""
    # Work on shallow copies so the caller's dicts are never mutated.
    step_payload = dict(step_dict)
    raw_handlers = step_payload.pop("result_handlers", [])

    new_handlers = []
    for raw_handler in raw_handlers:
        handler = dict(raw_handler)
        nested = handler.get("steps")
        if isinstance(nested, list):
            # Nested steps may be Step objects (have to_dict) or plain dicts.
            handler["steps"] = [
                convert_step_to_step_with_handlers(s)
                if hasattr(s, "to_dict")
                else convert_step_dict_to_step_with_handlers(s)
                for s in nested
            ]
        new_handlers.append(handler)

    return {"step": step_payload, "result_handlers": new_handlers}
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def get_all_python_files_in_directory(
    source_file_path: str, exclude_patterns: Optional[List[str]] = None
) -> List[Dict]:
    """Get all Python files in the same directory as the source file."""
    import fnmatch

    # Default exclusions: caches, bytecode, and test modules.
    patterns = (
        ["__pycache__", "*.pyc", "test_*", "*_test.py"]
        if exclude_patterns is None
        else exclude_patterns
    )

    collected = []
    for candidate in Path(source_file_path).parent.glob("*.py"):
        # Skip the source file itself and anything matching an exclusion.
        if candidate.samefile(source_file_path):
            continue
        if any(fnmatch.fnmatch(candidate.name, pat) for pat in patterns):
            continue
        try:
            text = candidate.read_text(encoding="utf-8")
        except Exception as err:
            print(f"Warning: Could not read {candidate}: {err}", file=sys.stderr)
        else:
            collected.append({"filename": candidate.name, "content": text})

    return collected
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def resolve_code_files_references(
    code_files: List, source_file_path: str
) -> List[Dict]:
    """Resolve PythonFile references to actual file contents."""
    if not code_files:
        return []

    base_dir = Path(source_file_path).parent
    resolved: List[Dict] = []

    for entry in code_files:
        is_python_file_ref = (
            isinstance(entry, dict) and entry.get("_type") == "PythonFile"
        )
        if not is_python_file_ref:
            # Anything that is not a PythonFile marker passes through as-is.
            resolved.append(entry)
            continue

        target = base_dir / entry["filename"]
        try:
            text = target.read_text(encoding="utf-8")
        except Exception as err:
            print(
                f"Warning: Could not resolve PythonFile {target}: {err}",
                file=sys.stderr,
            )
        else:
            # Store under the bare filename, dropping any directory prefix.
            resolved.append(
                {"filename": Path(entry["filename"]).name, "content": text}
            )

    return resolved
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def extract_and_update_function_code(
    step_obj: Any, step_dict: Dict, source_file_path: str
):
    """Extract function body from decorated codeexec.execute function and update step parameters.

    Parses the agent source file with ``ast``, locates the function whose
    name matches ``step_obj.__name__``, wraps its body in a generated
    ``main.py`` runner script, and writes the combined file list into
    ``step_dict["parameters"]["code_files"]``. Best effort: any failure
    is reported to stderr and the step dict is left unchanged.
    """
    func_name = getattr(step_obj, "__name__", None)
    if not func_name:
        # Not a decorated function (e.g. a plain Step object); nothing to extract.
        return

    try:
        with open(source_file_path, "r") as f:
            source_content = f.read()

        tree = ast.parse(source_content)
        func_body = None

        for node in ast.walk(tree):
            if isinstance(node, ast.FunctionDef) and node.name == func_name:
                source_lines = source_content.split("\n")
                # node.lineno is 1-based, so slicing source_lines (0-based)
                # from it deliberately skips the `def` line — only the body
                # is kept, to be re-wrapped in a new def below.
                start_line = node.lineno
                end_line = (
                    node.end_lineno
                    if hasattr(node, "end_lineno")
                    else len(source_lines)
                )
                body_lines = source_lines[start_line:end_line]
                func_body = textwrap.dedent("\n".join(body_lines))
                break

        if func_body:
            if "parameters" not in step_dict:
                step_dict["parameters"] = {}

            # Resolve any PythonFile references already attached to the step,
            # then add every sibling .py file from the source directory.
            existing_code_files = step_dict["parameters"].get("code_files", [])
            resolved_files = resolve_code_files_references(
                existing_code_files, source_file_path
            )
            directory_files = get_all_python_files_in_directory(source_file_path)
            resolved_files.extend(directory_files)

            # Generated runner: reads parameters/secrets from environment
            # variables, builds a StepContext, calls the extracted function,
            # and prints its JSON result. `{{}}`/`{{e}}` are escaped braces
            # that render as `{}`/`{e}` in the emitted script.
            main_content = f"""# Function: {func_name}
import json
import sys
import os
from erdo.types import StepContext

def {func_name}(context):
    \"\"\"Extracted function implementation.\"\"\"
{textwrap.indent(func_body, '    ')}

if __name__ == "__main__":
    parameters_json = os.environ.get('STEP_PARAMETERS', '{{}}')
    parameters = json.loads(parameters_json)
    secrets_json = os.environ.get('STEP_SECRETS', '{{}}')
    secrets = json.loads(secrets_json)
    context = StepContext(parameters=parameters, secrets=secrets)

    try:
        result = {func_name}(context)
        if result:
            print(json.dumps(result))
    except Exception as e:
        print(f"Error: {{e}}", file=sys.stderr)
        sys.exit(1)
"""

            # main.py goes first so it wins any filename collision below.
            all_files = [{"filename": "main.py", "content": main_content}]
            all_files.extend(resolved_files)

            # De-duplicate by filename, keeping the first occurrence.
            seen_filenames = set()
            unique_files = []
            for file_dict in all_files:
                filename = file_dict.get("filename")
                if filename and filename not in seen_filenames:
                    seen_filenames.add(filename)
                    unique_files.append(file_dict)

            step_dict["parameters"]["code_files"] = unique_files

    except Exception as e:
        # Best effort: extraction failures must not break the sync.
        print(f"Warning: Failed to extract function {func_name}: {e}", file=sys.stderr)
|
|
246
|
+
|
|
247
|
+
|
|
248
|
+
def convert_step_to_step_with_handlers(
    step_obj: Any, source_file_path: Optional[str] = None
) -> Dict:
    """Convert a Step object to StepWithHandlers format recursively."""
    # Serialize the step, then rewrite state references into templates.
    step_dict = transform_dict_recursively(step_obj.to_dict())

    # Replace PythonFile markers with real file contents when possible.
    if source_file_path and step_dict.get("parameters", {}).get("code_files"):
        resolved = resolve_code_files_references(
            step_dict["parameters"]["code_files"], source_file_path
        )
        if resolved:
            step_dict["parameters"]["code_files"] = resolved

    # If no code file carries content yet, fall back to extracting the
    # decorated function's source from the agent file (best effort).
    current_files = step_dict.get("parameters", {}).get("code_files", [])
    needs_extraction = (
        source_file_path
        and hasattr(step_obj, "__name__")
        and not any(f.get("content") for f in current_files)
    )
    if needs_extraction:
        try:
            extract_and_update_function_code(step_obj, step_dict, source_file_path)
        except Exception:
            pass

    return convert_step_dict_to_step_with_handlers(step_dict)
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def extract_action_result_schemas(module: Any) -> Dict:
    """Extract action result schemas from parameter classes with _result attribute."""
    result_schemas: Dict[str, Any] = {}
    # Map plain Python annotations onto JSON-ish type names; anything else
    # is reported as "any".
    type_names = {
        str: "string",
        int: "number",
        bool: "boolean",
        list: "array",
        dict: "object",
    }

    for attr_name in dir(module):
        candidate = getattr(module, attr_name)
        if not (inspect.isclass(candidate) and hasattr(candidate, "_result")):
            continue

        result_class = candidate._result
        if not inspect.isclass(result_class):
            continue

        # The action name is the last dotted segment of the "name" field's
        # default, read from pydantic v2 (model_fields) or v1 (__fields__).
        action_name = None
        if hasattr(candidate, "model_fields"):
            name_field = candidate.model_fields.get("name")
            if name_field and name_field.default:
                action_name = name_field.default.split(".")[-1]
        elif hasattr(candidate, "__fields__"):
            name_field = candidate.__fields__.get("name")
            if name_field and name_field.default:
                action_name = name_field.default.split(".")[-1]

        if not (action_name and result_class):
            continue

        schema: Dict[str, Any] = {
            "class_name": result_class.__name__,
            "description": result_class.__doc__ or "",
            "required_fields": [],
            "optional_fields": [],
            "field_types": {},
        }

        if hasattr(result_class, "model_fields"):
            fields = result_class.model_fields
        elif hasattr(result_class, "__fields__"):
            fields = result_class.__fields__
        else:
            fields = {}

        for field_name, field_info in fields.items():
            # Determine whether the field is mandatory, across pydantic
            # versions: v2 exposes is_required(), v1 a .required attribute;
            # otherwise a non-None default implies optional.
            if hasattr(field_info, "is_required"):
                required = field_info.is_required()
            elif hasattr(field_info, "required"):
                required = field_info.required
            elif hasattr(field_info, "default") and field_info.default is not None:
                required = False
            else:
                required = True

            bucket = "required_fields" if required else "optional_fields"
            schema[bucket].append(field_name)

            field_type = "any"
            if hasattr(field_info, "annotation"):
                for py_type, label in type_names.items():
                    if field_info.annotation == py_type:
                        field_type = label
                        break
            schema["field_types"][field_name] = field_type

        result_schemas[action_name] = schema

    return result_schemas
|
|
351
|
+
|
|
352
|
+
|
|
353
|
+
def extract_single_agent_data(
    agent: Any, file_path: str, module: Optional[Any] = None
) -> Dict:
    """Extract data from a single agent."""
    converted_steps = []
    for step in getattr(agent, "steps", []):
        wrapped = convert_step_to_step_with_handlers(step, file_path)

        # Steps built via the module-decorator path carry their own files;
        # those replace whatever code_files extraction produced.
        module_files = getattr(step, "_module_files", None)
        if module_files:
            inner = wrapped["step"]
            inner.setdefault("parameters", {})
            inner["parameters"]["code_files"] = [
                {"filename": name, "content": body}
                for name, body in module_files.items()
            ]
            if hasattr(step, "_entrypoint"):
                inner["parameters"]["entrypoint"] = step._entrypoint

        converted_steps.append(wrapped)

    schemas = extract_action_result_schemas(module) if module else {}

    # Ship the raw source alongside the structured data when available.
    source_code = ""
    if file_path and os.path.exists(file_path):
        with open(file_path, "r") as f:
            source_code = f.read()

    return {
        "bot": {
            "name": agent.name,
            "key": getattr(agent, "key", None),  # Include the bot key
            "description": agent.description or "",
            "visibility": agent.visibility,
            "persona": agent.persona,
            "running_message": agent.running_message,
            "finished_message": agent.finished_message,
            "source": "python",
        },
        "parameter_definitions": agent.parameter_definitions or [],
        "steps": converted_steps,
        "file_path": file_path,
        "source_code": source_code,
        "action_result_schemas": schemas,
    }
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
def extract_agent_from_instance(
    agent: Any, source_file_path: Optional[str] = None
) -> Dict:
    """Extract agent data from an Agent instance."""
    module = None
    if source_file_path:
        # Best-effort import of the agent's source module so that action
        # result schemas can be harvested; any failure falls back to None.
        try:
            spec = importlib.util.spec_from_file_location(
                "agent_module", source_file_path
            )
            if not spec or not spec.loader:
                raise ValueError(f"Could not load module spec from {source_file_path}")
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
        except Exception:
            module = None

    return extract_single_agent_data(agent, source_file_path or "", module)
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def extract_agents_from_file(file_path: str) -> Union[Dict, List[Dict]]:
    """Extract agent(s) from a Python file.

    The file must define a module-level ``agents = [...]`` list. Returns a
    list of agent-data dicts when the file is an ``__init__.py`` with more
    than one agent, otherwise a single dict for the first agent.

    Raises:
        FileNotFoundError: if ``file_path`` does not exist.
        ValueError: if no ``agents`` assignment/list is found, the list is
            empty, or the module spec cannot be created.

    NOTE: executes the target module (arbitrary code) and may leave its
    directory prepended to ``sys.path`` as a side effect.
    """
    if not os.path.exists(file_path):
        raise FileNotFoundError(f"File does not exist: {file_path}")

    # Check if file has agents
    # Static AST pre-check for an `agents = ...` assignment, so we can fail
    # fast without importing (and thereby executing) the module.
    with open(file_path, "r") as f:
        source = f.read()

    tree = ast.parse(source)
    has_agents = False

    for node in ast.walk(tree):
        if isinstance(node, ast.Assign):
            for target in node.targets:
                if isinstance(target, ast.Name) and target.id == "agents":
                    has_agents = True
                    break
                    # NOTE(review): this break only exits the inner targets
                    # loop; the outer walk continues harmlessly.

    if not has_agents:
        raise ValueError("No 'agents = [...]' assignment found in file")

    # Load the module
    # Make sibling modules importable from the agent file's directory.
    file_dir = os.path.dirname(file_path)
    if file_dir not in sys.path:
        sys.path.insert(0, file_dir)

    spec = importlib.util.spec_from_file_location("target_module", file_path)
    if not spec or not spec.loader:
        raise ValueError(f"Could not load module from {file_path}")

    module = importlib.util.module_from_spec(spec)

    # Execute the module with warnings suppressed (agent files may import
    # deprecated dependencies).
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        spec.loader.exec_module(module)

    if not hasattr(module, "agents"):
        raise ValueError("No 'agents' list found in the module")

    agents_list = getattr(module, "agents")
    if not isinstance(agents_list, list) or len(agents_list) == 0:
        raise ValueError("'agents' must be a non-empty list")

    # Check if we're extracting all agents or just one
    if len(agents_list) > 1 and file_path.endswith("__init__.py"):
        # Package entry point with multiple agents: return all of them.
        result = []
        for agent in agents_list:
            agent_data = extract_single_agent_data(agent, file_path, module)
            result.append(agent_data)
        return result
    else:
        # Single-agent file (or non-package file): only the first agent is
        # extracted, even if more are listed.
        agent = agents_list[0]
        return extract_single_agent_data(agent, file_path, module)
|