agent-framework-devui 1.0.0b251007__py3-none-any.whl → 1.0.0b251016__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agent_framework_devui/_conversations.py +473 -0
- agent_framework_devui/_discovery.py +295 -325
- agent_framework_devui/_executor.py +99 -241
- agent_framework_devui/_mapper.py +281 -78
- agent_framework_devui/_server.py +232 -239
- agent_framework_devui/_utils.py +127 -0
- agent_framework_devui/models/__init__.py +15 -10
- agent_framework_devui/models/_discovery_models.py +1 -2
- agent_framework_devui/models/_openai_custom.py +45 -90
- agent_framework_devui/ui/assets/index-CE4pGoXh.css +1 -0
- agent_framework_devui/ui/assets/index-DmL7WSFa.js +577 -0
- agent_framework_devui/ui/index.html +2 -2
- agent_framework_devui-1.0.0b251016.dist-info/METADATA +286 -0
- agent_framework_devui-1.0.0b251016.dist-info/RECORD +23 -0
- agent_framework_devui/ui/assets/index-D0SfShuZ.js +0 -445
- agent_framework_devui/ui/assets/index-WsCIE0bH.css +0 -1
- agent_framework_devui-1.0.0b251007.dist-info/METADATA +0 -172
- agent_framework_devui-1.0.0b251007.dist-info/RECORD +0 -22
- {agent_framework_devui-1.0.0b251007.dist-info → agent_framework_devui-1.0.0b251016.dist-info}/WHEEL +0 -0
- {agent_framework_devui-1.0.0b251007.dist-info → agent_framework_devui-1.0.0b251016.dist-info}/entry_points.txt +0 -0
- {agent_framework_devui-1.0.0b251007.dist-info → agent_framework_devui-1.0.0b251016.dist-info}/licenses/LICENSE +0 -0
agent_framework_devui/_discovery.py
@@ -4,7 +4,6 @@
 
 from __future__ import annotations
 
-import hashlib
 import importlib
 import importlib.util
 import logging
@@ -13,7 +12,6 @@ import uuid
 from pathlib import Path
 from typing import Any
 
-import httpx
 from dotenv import load_dotenv
 
 from .models._discovery_models import EntityInfo
@@ -33,7 +31,6 @@ class EntityDiscovery:
         self.entities_dir = entities_dir
         self._entities: dict[str, EntityInfo] = {}
         self._loaded_objects: dict[str, Any] = {}
-        self._remote_cache_dir = Path.home() / ".agent_framework_devui" / "remote_cache"
 
     async def discover_entities(self) -> list[EntityInfo]:
         """Scan for Agent Framework entities.
@@ -73,6 +70,115 @@ class EntityDiscovery:
         """
         return self._loaded_objects.get(entity_id)
 
+    async def load_entity(self, entity_id: str) -> Any:
+        """Load entity on-demand (lazy loading).
+
+        This method implements lazy loading by importing the entity module only when needed.
+        In-memory entities are returned from cache immediately.
+
+        Args:
+            entity_id: Entity identifier
+
+        Returns:
+            Loaded entity object
+
+        Raises:
+            ValueError: If entity not found or cannot be loaded
+        """
+        # Check if already loaded (includes in-memory entities)
+        if entity_id in self._loaded_objects:
+            logger.debug(f"Entity {entity_id} already loaded (cache hit)")
+            return self._loaded_objects[entity_id]
+
+        # Get entity metadata
+        entity_info = self._entities.get(entity_id)
+        if not entity_info:
+            raise ValueError(f"Entity {entity_id} not found in registry")
+
+        # In-memory entities should never reach here (they're pre-loaded)
+        if entity_info.source == "in_memory":
+            raise ValueError(f"In-memory entity {entity_id} missing from loaded objects cache")
+
+        logger.info(f"Lazy loading entity: {entity_id} (source: {entity_info.source})")
+
+        # Load based on source - only directory and in-memory are supported
+        if entity_info.source == "directory":
+            entity_obj = await self._load_directory_entity(entity_id, entity_info)
+        else:
+            raise ValueError(
+                f"Unsupported entity source: {entity_info.source}. "
+                f"Only 'directory' and 'in_memory' sources are supported."
+            )
+
+        # Enrich metadata with actual entity data
+        # Don't pass entity_type if it's "unknown" - let inference determine the real type
+        enriched_info = await self.create_entity_info_from_object(
+            entity_obj,
+            entity_type=entity_info.type if entity_info.type != "unknown" else None,
+            source=entity_info.source,
+        )
+        # IMPORTANT: Preserve the original entity_id (enrichment generates a new one)
+        enriched_info.id = entity_id
+        # Preserve the original path from sparse metadata
+        if "path" in entity_info.metadata:
+            enriched_info.metadata["path"] = entity_info.metadata["path"]
+        enriched_info.metadata["lazy_loaded"] = True
+        self._entities[entity_id] = enriched_info
+
+        # Cache the loaded object
+        self._loaded_objects[entity_id] = entity_obj
+        logger.info(f"✅ Successfully loaded entity: {entity_id} (type: {enriched_info.type})")
+
+        return entity_obj
+
+    async def _load_directory_entity(self, entity_id: str, entity_info: EntityInfo) -> Any:
+        """Load entity from directory (imports module).
+
+        Args:
+            entity_id: Entity identifier
+            entity_info: Entity metadata
+
+        Returns:
+            Loaded entity object
+        """
+        # Get directory path from metadata
+        dir_path = Path(entity_info.metadata.get("path", ""))
+        if not dir_path.exists():  # noqa: ASYNC240
+            raise ValueError(f"Entity directory not found: {dir_path}")
+
+        # Load .env if it exists
+        if dir_path.is_dir():  # noqa: ASYNC240
+            self._load_env_for_entity(dir_path)
+        else:
+            self._load_env_for_entity(dir_path.parent)
+
+        # Import the module
+        if dir_path.is_dir():  # noqa: ASYNC240
+            # Directory-based entity - try different import patterns
+            import_patterns = [
+                entity_id,
+                f"{entity_id}.agent",
+                f"{entity_id}.workflow",
+            ]
+
+            for pattern in import_patterns:
+                module = self._load_module_from_pattern(pattern)
+                if module:
+                    # Find entity in module - pass entity_id so registration uses correct ID
+                    entity_obj = await self._find_entity_in_module(module, entity_id, str(dir_path))
+                    if entity_obj:
+                        return entity_obj
+
+            raise ValueError(f"No valid entity found in {dir_path}")
+        # File-based entity
+        module = self._load_module_from_file(dir_path, entity_id)
+        if module:
+            entity_obj = await self._find_entity_in_module(module, entity_id, str(dir_path))
+            if entity_obj:
+                return entity_obj
+
+        raise ValueError(f"No valid entity found in {dir_path}")
+
     def list_entities(self) -> list[EntityInfo]:
         """List all discovered entities.
 
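Note: the load_entity() path added above defers importing an entity's module until it is first requested, then caches the result in _loaded_objects. A minimal usage sketch (the import path, constructor signature, and the "weather_agent" directory name are assumptions for illustration, not taken from this diff):

    import asyncio

    from agent_framework_devui._discovery import EntityDiscovery

    async def main() -> None:
        discovery = EntityDiscovery(entities_dir="./entities")      # assumed signature
        await discovery.discover_entities()       # registers sparse metadata, imports nothing
        agent = await discovery.load_entity("weather_agent")        # module imported here
        agent_again = await discovery.load_entity("weather_agent")  # cache hit, no re-import
        assert agent is agent_again

    asyncio.run(main())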
@@ -81,6 +187,48 @@ class EntityDiscovery:
         """
         return list(self._entities.values())
 
+    def invalidate_entity(self, entity_id: str) -> None:
+        """Invalidate (clear cache for) an entity to enable hot reload.
+
+        This removes the entity from the loaded objects cache and clears its module
+        from Python's sys.modules cache. The entity metadata remains, so it will be
+        reimported on next access.
+
+        Args:
+            entity_id: Entity identifier to invalidate
+        """
+        # Remove from loaded objects cache
+        if entity_id in self._loaded_objects:
+            del self._loaded_objects[entity_id]
+            logger.info(f"Cleared loaded object cache for: {entity_id}")
+
+        # Clear from Python's module cache (including submodules)
+        keys_to_delete = [
+            module_name
+            for module_name in sys.modules
+            if module_name == entity_id or module_name.startswith(f"{entity_id}.")
+        ]
+        for key in keys_to_delete:
+            del sys.modules[key]
+            logger.debug(f"Cleared module cache: {key}")
+
+        # Reset lazy_loaded flag in metadata
+        entity_info = self._entities.get(entity_id)
+        if entity_info and "lazy_loaded" in entity_info.metadata:
+            entity_info.metadata["lazy_loaded"] = False
+
+        logger.info(f"♻️ Entity invalidated: {entity_id} (will reload on next access)")
+
+    def invalidate_all(self) -> None:
+        """Invalidate all cached entities.
+
+        Useful for forcing a complete reload of all entities.
+        """
+        entity_ids = list(self._loaded_objects.keys())
+        for entity_id in entity_ids:
+            self.invalidate_entity(entity_id)
+        logger.info(f"Invalidated {len(entity_ids)} entities")
+
     def register_entity(self, entity_id: str, entity_info: EntityInfo, entity_object: Any) -> None:
         """Register an entity with both metadata and object.
 
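Note: invalidate_entity() / invalidate_all() drop the cached object and the matching sys.modules entries while keeping the registry metadata, so the next load_entity() call re-imports the edited source. A hot-reload sketch building on the example above (the entity name remains hypothetical):

    async def hot_reload(discovery: EntityDiscovery, entity_id: str):
        stale = await discovery.load_entity(entity_id)
        # ... edit the entity's source file on disk ...
        discovery.invalidate_entity(entity_id)          # clears object cache + sys.modules
        fresh = await discovery.load_entity(entity_id)  # re-imports the module
        return stale, fresh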
@@ -116,16 +264,9 @@ class EntityDiscovery:
         # Extract metadata with improved fallback naming
         name = getattr(entity_object, "name", None)
         if not name:
-            # In-memory entities: use
-
-
-                # Truncate UUID to first 8 characters for readability
-                short_id = str(entity_id_raw)[:8] if len(str(entity_id_raw)) > 8 else str(entity_id_raw)
-                name = f"{entity_type.title()} {short_id}"
-            else:
-                # Fallback to class name with entity type
-                class_name = entity_object.__class__.__name__
-                name = f"{entity_type.title()} {class_name}"
+            # In-memory entities: use class name as it's more readable than UUID
+            class_name = entity_object.__class__.__name__
+            name = f"{entity_type.title()} {class_name}"
         description = getattr(entity_object, "description", "")
 
         # Generate entity ID using Agent Framework specific naming
@@ -142,43 +283,27 @@ class EntityDiscovery:
         middleware_list = None
 
         if entity_type == "agent":
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                and entity_object.context_provider
-                and hasattr(entity_object.context_provider, "__class__")
-            ):
-                context_providers_list = [entity_object.context_provider.__class__.__name__]
-
-            # Try to get middleware
-            if hasattr(entity_object, "middleware") and entity_object.middleware:
-                middleware_list = []
-                for m in entity_object.middleware:
-                    # Try multiple ways to get a good name for middleware
-                    if hasattr(m, "__name__"):  # Function or callable
-                        middleware_list.append(m.__name__)
-                    elif hasattr(m, "__class__"):  # Class instance
-                        middleware_list.append(m.__class__.__name__)
-                    else:
-                        middleware_list.append(str(m))
+            from ._utils import extract_agent_metadata
+
+            agent_meta = extract_agent_metadata(entity_object)
+            instructions = agent_meta["instructions"]
+            model = agent_meta["model"]
+            chat_client_type = agent_meta["chat_client_type"]
+            context_providers_list = agent_meta["context_providers"]
+            middleware_list = agent_meta["middleware"]
+
+        # Log helpful info about agent capabilities (before creating EntityInfo)
+        if entity_type == "agent":
+            has_run_stream = hasattr(entity_object, "run_stream")
+            has_run = hasattr(entity_object, "run")
+
+            if not has_run_stream and has_run:
+                logger.info(
+                    f"Agent '{entity_id}' only has run() (non-streaming). "
+                    "DevUI will automatically convert to streaming."
+                )
+            elif not has_run_stream and not has_run:
+                logger.warning(f"Agent '{entity_id}' lacks both run() and run_stream() methods. May not work.")
 
         # Create EntityInfo with Agent Framework specifics
         return EntityInfo(
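Note: both call sites that previously probed the agent object inline now delegate to extract_agent_metadata() in the new _utils.py (added in this release, +127 lines, not shown here). A sketch of the shape that helper appears to have, inferred from the removed inline code and the dictionary keys read above; the real implementation in _utils.py may differ:

    from typing import Any

    def extract_agent_metadata(agent: Any) -> dict[str, Any]:
        """Sketch only: gather display metadata from an agent object in one place."""
        chat_client = getattr(agent, "chat_client", None)
        context_provider = getattr(agent, "context_provider", None)

        middleware = None
        if getattr(agent, "middleware", None):
            # Prefer a function's __name__, otherwise fall back to the class name.
            middleware = [getattr(m, "__name__", m.__class__.__name__) for m in agent.middleware]

        return {
            "instructions": getattr(agent, "instructions", None),
            "model": getattr(chat_client, "model_id", None),
            "chat_client_type": chat_client.__class__.__name__ if chat_client else None,
            "context_providers": [context_provider.__class__.__name__] if context_provider else None,
            "middleware": middleware,
        }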
@@ -206,7 +331,10 @@ class EntityDiscovery:
         )
 
     async def _scan_entities_directory(self, entities_dir: Path) -> None:
-        """Scan the entities directory for Agent Framework entities.
+        """Scan the entities directory for Agent Framework entities (lazy loading).
+
+        This method scans the filesystem WITHOUT importing modules, creating sparse
+        metadata that will be enriched on-demand when entities are accessed.
 
         Args:
             entities_dir: Directory to scan for entities
@@ -215,78 +343,120 @@ class EntityDiscovery:
             logger.warning(f"Entities directory not found: {entities_dir}")
             return
 
-        logger.info(f"Scanning {entities_dir} for Agent Framework entities...")
+        logger.info(f"Scanning {entities_dir} for Agent Framework entities (lazy mode)...")
 
         # Add entities directory to Python path if not already there
         entities_dir_str = str(entities_dir)
         if entities_dir_str not in sys.path:
             sys.path.insert(0, entities_dir_str)
 
-        # Scan for directories and Python files
+        # Scan for directories and Python files WITHOUT importing
        for item in entities_dir.iterdir():  # noqa: ASYNC240
             if item.name.startswith(".") or item.name == "__pycache__":
                 continue
 
-            if item.is_dir():
-                # Directory-based entity
-
+            if item.is_dir() and self._looks_like_entity(item):
+                # Directory-based entity - create sparse metadata
+                self._register_sparse_entity(item)
             elif item.is_file() and item.suffix == ".py" and not item.name.startswith("_"):
-                # Single file entity
-
+                # Single file entity - create sparse metadata
+                self._register_sparse_file_entity(item)
 
-
-        """
+    def _looks_like_entity(self, dir_path: Path) -> bool:
+        """Check if directory contains an entity (without importing).
 
         Args:
-            dir_path: Directory
-        """
-        entity_id = dir_path.name
-        logger.debug(f"Scanning directory: {entity_id}")
+            dir_path: Directory to check
 
-
-
-
+        Returns:
+            True if directory appears to contain an entity
+        """
+        return (
+            (dir_path / "agent.py").exists()
+            or (dir_path / "workflow.py").exists()
+            or (dir_path / "__init__.py").exists()
+        )
 
-
-
-            entity_id,  # Direct module import
-            f"{entity_id}.agent",  # agent.py submodule
-            f"{entity_id}.workflow",  # workflow.py submodule
-        ]
+    def _detect_entity_type(self, dir_path: Path) -> str:
+        """Detect entity type from directory structure (without importing).
 
-
-
-
-
-            if entities_found:
-                logger.debug(f"Found {len(entities_found)} entities in {pattern}")
-                break
+        Uses filename conventions to determine entity type:
+        - workflow.py → "workflow"
+        - agent.py → "agent"
+        - both or neither → "unknown"
 
-
-
+        Args:
+            dir_path: Directory to analyze
 
-
-
+        Returns:
+            Entity type: "workflow", "agent", or "unknown"
+        """
+        has_agent = (dir_path / "agent.py").exists()
+        has_workflow = (dir_path / "workflow.py").exists()
+
+        if has_agent and has_workflow:
+            # Both files exist - ambiguous, mark as unknown
+            return "unknown"
+        if has_workflow:
+            return "workflow"
+        if has_agent:
+            return "agent"
+        # Has __init__.py but no specific file
+        return "unknown"
+
+    def _register_sparse_entity(self, dir_path: Path) -> None:
+        """Register entity with sparse metadata (no import).
 
         Args:
-
+            dir_path: Entity directory
         """
-
-
-        self._load_env_for_entity(file_path.parent)
+        entity_id = dir_path.name
+        entity_type = self._detect_entity_type(dir_path)
 
-
-
+        entity_info = EntityInfo(
+            id=entity_id,
+            name=entity_id.replace("_", " ").title(),
+            type=entity_type,
+            framework="agent_framework",
+            tools=[],  # Sparse - will be populated on load
+            description="",  # Sparse - will be populated on load
+            source="directory",
+            metadata={
+                "path": str(dir_path),
+                "discovered": True,
+                "lazy_loaded": False,
+            },
+        )
 
-
-
-        if module:
-            entities_found = await self._find_entities_in_module(module, base_name, str(file_path))
-            if entities_found:
-                logger.debug(f"Found {len(entities_found)} entities in {file_path.name}")
+        self._entities[entity_id] = entity_info
+        logger.debug(f"Registered sparse entity: {entity_id} (type: {entity_type})")
 
-
-
+    def _register_sparse_file_entity(self, file_path: Path) -> None:
+        """Register file-based entity with sparse metadata (no import).
+
+        Args:
+            file_path: Entity Python file
+        """
+        entity_id = file_path.stem
+
+        # File-based entities are typically agents, but we can't know for sure without importing
+        entity_info = EntityInfo(
+            id=entity_id,
+            name=entity_id.replace("_", " ").title(),
+            type="unknown",  # Will be determined on load
+            framework="agent_framework",
+            tools=[],
+            description="",
+            source="directory",
+            metadata={
+                "path": str(file_path),
+                "discovered": True,
+                "lazy_loaded": False,
+            },
+        )
+
+        self._entities[entity_id] = entity_info
+        logger.debug(f"Registered sparse file entity: {entity_id}")
 
     def _load_env_for_entity(self, entity_path: Path) -> bool:
         """Load .env file for an entity.
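Note: the rewritten scan registers entities from filenames alone: workflow.py maps to "workflow", agent.py to "agent", and anything ambiguous to "unknown", which load_entity() later corrects. A sketch of what the sparse pass records (paths and entity names are hypothetical):

    import asyncio
    from pathlib import Path

    from agent_framework_devui._discovery import EntityDiscovery

    async def main() -> None:
        entities_dir = Path("./entities")
        (entities_dir / "triage_workflow").mkdir(parents=True, exist_ok=True)
        (entities_dir / "triage_workflow" / "workflow.py").touch()

        discovery = EntityDiscovery(entities_dir=str(entities_dir))
        for info in await discovery.discover_entities():
            # Sparse metadata: type comes from the filename convention, tools and
            # description stay empty, and lazy_loaded remains False until first import.
            print(info.id, info.type, info.metadata.get("lazy_loaded"))

    asyncio.run(main())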
@@ -378,19 +548,17 @@ class EntityDiscovery:
             logger.warning(f"Error loading module from {file_path}: {e}")
             return None
 
-    async def
-        """Find agent
+    async def _find_entity_in_module(self, module: Any, entity_id: str, module_path: str) -> Any:
+        """Find agent or workflow entity in a loaded module.
 
         Args:
             module: Loaded Python module
-
+            entity_id: Expected entity identifier to register with
             module_path: Path to module for metadata
 
         Returns:
-
+            Loaded entity object, or None if not found
         """
-        entities_found = []
-
         # Look for explicit variable names first
         candidates = [
             ("agent", getattr(module, "agent", None)),
@@ -402,11 +570,12 @@ class EntityDiscovery:
                 continue
 
             if self._is_valid_entity(obj, obj_type):
-                #
-
-
+                # Register with the correct entity_id (from directory name)
+                # Store the object directly in _loaded_objects so we can return it
+                self._loaded_objects[entity_id] = obj
+                return obj
 
-        return
+        return None
 
     def _is_valid_entity(self, obj: Any, expected_type: str) -> bool:
         """Check if object is a valid agent or workflow using duck typing.
@@ -444,7 +613,9 @@ class EntityDiscovery:
                 pass
 
             # Fallback to duck typing for agent protocol
-
+            # Agent must have either run_stream() or run() method, plus id and name
+            has_execution_method = hasattr(obj, "run_stream") or hasattr(obj, "run")
+            if has_execution_method and hasattr(obj, "id") and hasattr(obj, "name"):
                 return True
 
         except (TypeError, AttributeError):
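Note: the relaxed duck-typing check above accepts agents that expose only run(); per the logging added earlier in this diff, DevUI converts such non-streaming agents to streaming automatically. A minimal object that would now pass validation (purely illustrative):

    class EchoAgent:
        """Satisfies the duck-typing check: id + name + at least one of run()/run_stream()."""

        id = "echo_agent"
        name = "Echo Agent"

        async def run(self, message: str) -> str:
            return f"echo: {message}"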
@@ -482,13 +653,9 @@ class EntityDiscovery:
         # Extract metadata from the live object with improved fallback naming
         name = getattr(obj, "name", None)
         if not name:
-
-
-
-                short_id = str(entity_id_raw)[:8] if len(str(entity_id_raw)) > 8 else str(entity_id_raw)
-                name = f"{obj_type.title()} {short_id}"
-            else:
-                name = f"{obj_type.title()} {obj.__class__.__name__}"
+            # Use class name as it's more readable than UUID
+            class_name = obj.__class__.__name__
+            name = f"{obj_type.title()} {class_name}"
         description = getattr(obj, "description", None)
         tools = await self._extract_tools_from_object(obj, obj_type)
 
@@ -505,39 +672,14 @@ class EntityDiscovery:
         middleware_list = None
 
         if obj_type == "agent":
-
-
-
-
-
-
-
-
-                model = obj.chat_client.model_id
-
-            # Try to get chat client type
-            if hasattr(obj, "chat_client"):
-                chat_client_type = obj.chat_client.__class__.__name__
-
-            # Try to get context providers
-            if (
-                hasattr(obj, "context_provider")
-                and obj.context_provider
-                and hasattr(obj.context_provider, "__class__")
-            ):
-                context_providers_list = [obj.context_provider.__class__.__name__]
-
-            # Try to get middleware
-            if hasattr(obj, "middleware") and obj.middleware:
-                middleware_list = []
-                for m in obj.middleware:
-                    # Try multiple ways to get a good name for middleware
-                    if hasattr(m, "__name__"):  # Function or callable
-                        middleware_list.append(m.__name__)
-                    elif hasattr(m, "__class__"):  # Class instance
-                        middleware_list.append(m.__class__.__name__)
-                    else:
-                        middleware_list.append(str(m))
+            from ._utils import extract_agent_metadata
+
+            agent_meta = extract_agent_metadata(obj)
+            instructions = agent_meta["instructions"]
+            model = agent_meta["model"]
+            chat_client_type = agent_meta["chat_client_type"]
+            context_providers_list = agent_meta["context_providers"]
+            middleware_list = agent_meta["middleware"]
 
         entity_info = EntityInfo(
             id=entity_id,
@@ -628,7 +770,7 @@ class EntityDiscovery:
             source: Source of entity (directory, in_memory, remote)
 
         Returns:
-            Unique entity ID with format: {type}_{source}_{name}_{
+            Unique entity ID with format: {type}_{source}_{name}_{uuid}
         """
         import re
 
@@ -644,179 +786,7 @@ class EntityDiscovery:
         else:
             base_name = "entity"
 
-        # Generate
-
-
-        return f"{entity_type}_{source}_{base_name}_{short_uuid}"
-
-    async def fetch_remote_entity(
-        self, url: str, metadata: dict[str, Any] | None = None
-    ) -> tuple[EntityInfo | None, str | None]:
-        """Fetch and register entity from URL.
-
-        Args:
-            url: URL to Python file containing entity
-            metadata: Additional metadata (source, sampleId, etc.)
-
-        Returns:
-            Tuple of (EntityInfo if successful, error_message if failed)
-        """
-        try:
-            normalized_url = self._normalize_url(url)
-            logger.info(f"Normalized URL: {normalized_url}")
-
-            content = await self._fetch_url_content(normalized_url)
-            if not content:
-                error_msg = "Failed to fetch content from URL. The file may not exist or is not accessible."
-                logger.warning(error_msg)
-                return None, error_msg
-
-            if not self._validate_python_syntax(content):
-                error_msg = "Invalid Python syntax in the file. Please check the file contains valid Python code."
-                logger.warning(error_msg)
-                return None, error_msg
-
-            entity_object = await self._load_entity_from_content(content, url)
-            if not entity_object:
-                error_msg = (
-                    "No valid agent or workflow found in the file. "
-                    "Make sure the file contains an 'agent' or 'workflow' variable."
-                )
-                logger.warning(error_msg)
-                return None, error_msg
-
-            entity_info = await self.create_entity_info_from_object(
-                entity_object,
-                entity_type=None,  # Auto-detect
-                source="remote",
-            )
-
-            entity_info.source = metadata.get("source", "remote_gallery") if metadata else "remote_gallery"
-            entity_info.original_url = url
-            if metadata:
-                entity_info.metadata.update(metadata)
-
-            self.register_entity(entity_info.id, entity_info, entity_object)
+        # Generate full UUID for guaranteed uniqueness
+        full_uuid = uuid.uuid4().hex
 
-
-            return entity_info, None
-
-        except Exception as e:
-            error_msg = f"Unexpected error: {e!s}"
-            logger.error(f"Error fetching remote entity from {url}: {e}", exc_info=True)
-            return None, error_msg
-
-    def _normalize_url(self, url: str) -> str:
-        """Convert various Git hosting URLs to raw content URLs."""
-        # GitHub: blob -> raw
-        if "github.com" in url and "/blob/" in url:
-            return url.replace("github.com", "raw.githubusercontent.com").replace("/blob/", "/")
-
-        # GitLab: blob -> raw
-        if "gitlab.com" in url and "/-/blob/" in url:
-            return url.replace("/-/blob/", "/-/raw/")
-
-        # Bitbucket: src -> raw
-        if "bitbucket.org" in url and "/src/" in url:
-            return url.replace("/src/", "/raw/")
-
-        return url
-
-    async def _fetch_url_content(self, url: str, max_size_mb: int = 10) -> str | None:
-        """Fetch content from URL with size and timeout limits."""
-        try:
-            timeout = 30.0  # 30 second timeout
-
-            async with httpx.AsyncClient(timeout=timeout) as client:
-                response = await client.get(url)
-
-                if response.status_code != 200:
-                    logger.warning(f"HTTP {response.status_code} for {url}")
-                    return None
-
-                # Check content length
-                content_length = response.headers.get("content-length")
-                if content_length and int(content_length) > max_size_mb * 1024 * 1024:
-                    logger.warning(f"File too large: {content_length} bytes")
-                    return None
-
-                # Read with size limit
-                content = response.text
-                if len(content.encode("utf-8")) > max_size_mb * 1024 * 1024:
-                    logger.warning("Content too large after reading")
-                    return None
-
-                return content
-
-        except Exception as e:
-            logger.error(f"Error fetching {url}: {e}")
-            return None
-
-    def _validate_python_syntax(self, content: str) -> bool:
-        """Validate that content is valid Python code."""
-        try:
-            compile(content, "<remote>", "exec")
-            return True
-        except SyntaxError as e:
-            logger.warning(f"Python syntax error: {e}")
-            return False
-
-    async def _load_entity_from_content(self, content: str, source_url: str) -> Any | None:
-        """Load entity object from Python content string using disk-based import.
-
-        This method caches remote entities to disk and uses importlib for loading,
-        making it consistent with local entity discovery and avoiding exec() security warnings.
-        """
-        try:
-            # Create cache directory if it doesn't exist
-            self._remote_cache_dir.mkdir(parents=True, exist_ok=True)
-
-            # Generate a unique filename based on URL hash
-            url_hash = hashlib.sha256(source_url.encode()).hexdigest()[:16]
-            module_name = f"remote_entity_{url_hash}"
-            cached_file = self._remote_cache_dir / f"{module_name}.py"
-
-            # Write content to cache file
-            cached_file.write_text(content, encoding="utf-8")
-            logger.debug(f"Cached remote entity to {cached_file}")
-
-            # Load module from cached file using importlib (same as local scanning)
-            module = self._load_module_from_file(cached_file, module_name)
-            if not module:
-                logger.warning(f"Failed to load module from cached file: {cached_file}")
-                return None
-
-            # Look for agent or workflow objects in the loaded module
-            for name in dir(module):
-                if name.startswith("_"):
-                    continue
-
-                obj = getattr(module, name)
-
-                # Check for explicitly named entities first
-                if name in ["agent", "workflow"] and self._is_valid_entity(obj, name):
-                    return obj
-
-                # Also check if any object looks like an agent/workflow
-                if self._is_valid_agent(obj) or self._is_valid_workflow(obj):
-                    return obj
-
-            return None
-
-        except Exception as e:
-            logger.error(f"Error loading entity from content: {e}")
-            return None
-
-    def remove_remote_entity(self, entity_id: str) -> bool:
-        """Remove a remote entity by ID."""
-        if entity_id in self._entities:
-            entity_info = self._entities[entity_id]
-            if entity_info.source in ["remote_gallery", "remote"]:
-                del self._entities[entity_id]
-                if entity_id in self._loaded_objects:
-                    del self._loaded_objects[entity_id]
-                logger.info(f"Removed remote entity: {entity_id}")
-                return True
-            logger.warning(f"Cannot remove local entity: {entity_id}")
-            return False
-        return False
+        return f"{entity_type}_{source}_{base_name}_{full_uuid}"
|