agent-framework-devui 1.0.0b251001__py3-none-any.whl → 1.0.0b251016__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of agent-framework-devui might be problematic. See the registry's release advisory for more details.

@@ -174,7 +174,7 @@ class DevServer:
174
174
 
175
175
  @app.get("/v1/entities/{entity_id}/info", response_model=EntityInfo)
176
176
  async def get_entity_info(entity_id: str) -> EntityInfo:
177
- """Get detailed information about a specific entity."""
177
+ """Get detailed information about a specific entity (triggers lazy loading)."""
178
178
  try:
179
179
  executor = await self._ensure_executor()
180
180
  entity_info = executor.get_entity_info(entity_id)
@@ -182,80 +182,96 @@ class DevServer:
182
182
  if not entity_info:
183
183
  raise HTTPException(status_code=404, detail=f"Entity {entity_id} not found")
184
184
 
185
+ # Trigger lazy loading if entity not yet loaded
186
+ # This will import the module and enrich metadata
187
+ entity_obj = await executor.entity_discovery.load_entity(entity_id)
188
+
189
+ # Get updated entity info (may have been enriched during load)
190
+ entity_info = executor.get_entity_info(entity_id) or entity_info
191
+
185
192
  # For workflows, populate additional detailed information
186
- if entity_info.type == "workflow":
187
- entity_obj = executor.entity_discovery.get_entity_object(entity_id)
188
- if entity_obj:
189
- # Get workflow structure
190
- workflow_dump = None
191
- if hasattr(entity_obj, "to_dict") and callable(getattr(entity_obj, "to_dict", None)):
192
- try:
193
- workflow_dump = entity_obj.to_dict() # type: ignore[attr-defined]
194
- except Exception:
195
- workflow_dump = None
196
- elif hasattr(entity_obj, "to_json") and callable(getattr(entity_obj, "to_json", None)):
197
- try:
198
- raw_dump = entity_obj.to_json() # type: ignore[attr-defined]
199
- except Exception:
200
- workflow_dump = None
201
- else:
202
- if isinstance(raw_dump, (bytes, bytearray)):
203
- try:
204
- raw_dump = raw_dump.decode()
205
- except Exception:
206
- raw_dump = raw_dump.decode(errors="replace")
207
- if isinstance(raw_dump, str):
208
- try:
209
- parsed_dump = json.loads(raw_dump)
210
- except Exception:
211
- workflow_dump = raw_dump
212
- else:
213
- workflow_dump = parsed_dump if isinstance(parsed_dump, dict) else raw_dump
214
- else:
193
+ if entity_info.type == "workflow" and entity_obj:
194
+ # Entity object already loaded by load_entity() above
195
+ # Get workflow structure
196
+ workflow_dump = None
197
+ if hasattr(entity_obj, "to_dict") and callable(getattr(entity_obj, "to_dict", None)):
198
+ try:
199
+ workflow_dump = entity_obj.to_dict() # type: ignore[attr-defined]
200
+ except Exception:
201
+ workflow_dump = None
202
+ elif hasattr(entity_obj, "to_json") and callable(getattr(entity_obj, "to_json", None)):
203
+ try:
204
+ raw_dump = entity_obj.to_json() # type: ignore[attr-defined]
205
+ except Exception:
206
+ workflow_dump = None
207
+ else:
208
+ if isinstance(raw_dump, (bytes, bytearray)):
209
+ try:
210
+ raw_dump = raw_dump.decode()
211
+ except Exception:
212
+ raw_dump = raw_dump.decode(errors="replace")
213
+ if isinstance(raw_dump, str):
214
+ try:
215
+ parsed_dump = json.loads(raw_dump)
216
+ except Exception:
215
217
  workflow_dump = raw_dump
216
- elif hasattr(entity_obj, "__dict__"):
217
- workflow_dump = {k: v for k, v in entity_obj.__dict__.items() if not k.startswith("_")}
218
+ else:
219
+ workflow_dump = parsed_dump if isinstance(parsed_dump, dict) else raw_dump
220
+ else:
221
+ workflow_dump = raw_dump
222
+ elif hasattr(entity_obj, "__dict__"):
223
+ workflow_dump = {k: v for k, v in entity_obj.__dict__.items() if not k.startswith("_")}
218
224
 
219
- # Get input schema information
220
- input_schema = {}
221
- input_type_name = "Unknown"
222
- start_executor_id = ""
225
+ # Get input schema information
226
+ input_schema = {}
227
+ input_type_name = "Unknown"
228
+ start_executor_id = ""
223
229
 
224
- try:
225
- start_executor = entity_obj.get_start_executor()
226
- if start_executor and hasattr(start_executor, "_handlers"):
227
- message_types = list(start_executor._handlers.keys())
228
- if message_types:
229
- input_type = message_types[0]
230
- input_type_name = getattr(input_type, "__name__", str(input_type))
231
-
232
- # Basic schema generation for common types
233
- if input_type is str:
234
- input_schema = {"type": "string"}
235
- elif input_type is dict:
236
- input_schema = {"type": "object"}
237
- elif hasattr(input_type, "model_json_schema"):
238
- input_schema = input_type.model_json_schema()
239
-
240
- start_executor_id = getattr(start_executor, "executor_id", "")
241
- except Exception as e:
242
- logger.debug(f"Could not extract input info for workflow {entity_id}: {e}")
243
-
244
- # Get executor list
245
- executor_list = []
246
- if hasattr(entity_obj, "executors") and entity_obj.executors:
247
- executor_list = [getattr(ex, "executor_id", str(ex)) for ex in entity_obj.executors]
248
-
249
- # Create copy of entity info and populate workflow-specific fields
250
- update_payload: dict[str, Any] = {
251
- "workflow_dump": workflow_dump,
252
- "input_schema": input_schema,
253
- "input_type_name": input_type_name,
254
- "start_executor_id": start_executor_id,
255
- }
256
- if executor_list:
257
- update_payload["executors"] = executor_list
258
- return entity_info.model_copy(update=update_payload)
230
+ try:
231
+ from ._utils import (
232
+ extract_executor_message_types,
233
+ generate_input_schema,
234
+ select_primary_input_type,
235
+ )
236
+
237
+ start_executor = entity_obj.get_start_executor()
238
+ except Exception as e:
239
+ logger.debug(f"Could not extract input info for workflow {entity_id}: {e}")
240
+ else:
241
+ if start_executor:
242
+ start_executor_id = getattr(start_executor, "executor_id", "") or getattr(
243
+ start_executor, "id", ""
244
+ )
245
+
246
+ message_types = extract_executor_message_types(start_executor)
247
+ input_type = select_primary_input_type(message_types)
248
+
249
+ if input_type:
250
+ input_type_name = getattr(input_type, "__name__", str(input_type))
251
+
252
+ # Generate schema using comprehensive schema generation
253
+ input_schema = generate_input_schema(input_type)
254
+
255
+ if not input_schema:
256
+ input_schema = {"type": "string"}
257
+ if input_type_name == "Unknown":
258
+ input_type_name = "string"
259
+
260
+ # Get executor list
261
+ executor_list = []
262
+ if hasattr(entity_obj, "executors") and entity_obj.executors:
263
+ executor_list = [getattr(ex, "executor_id", str(ex)) for ex in entity_obj.executors]
264
+
265
+ # Create copy of entity info and populate workflow-specific fields
266
+ update_payload: dict[str, Any] = {
267
+ "workflow_dump": workflow_dump,
268
+ "input_schema": input_schema,
269
+ "input_type_name": input_type_name,
270
+ "start_executor_id": start_executor_id,
271
+ }
272
+ if executor_list:
273
+ update_payload["executors"] = executor_list
274
+ return entity_info.model_copy(update=update_payload)
259
275
 
260
276
  # For non-workflow entities, return as-is
261
277
  return entity_info
@@ -266,70 +282,34 @@ class DevServer:
266
282
  logger.error(f"Error getting entity info for {entity_id}: {e}")
267
283
  raise HTTPException(status_code=500, detail=f"Failed to get entity info: {e!s}") from e
268
284
 
269
- @app.post("/v1/entities/add")
270
- async def add_entity(request: dict[str, Any]) -> dict[str, Any]:
271
- """Add entity from URL."""
272
- try:
273
- url = request.get("url")
274
- metadata = request.get("metadata", {})
275
-
276
- if not url:
277
- raise HTTPException(status_code=400, detail="URL is required")
278
-
279
- logger.info(f"Attempting to add entity from URL: {url}")
280
- executor = await self._ensure_executor()
281
- entity_info, error_msg = await executor.entity_discovery.fetch_remote_entity(url, metadata)
285
+ @app.post("/v1/entities/{entity_id}/reload")
286
+ async def reload_entity(entity_id: str) -> dict[str, Any]:
287
+ """Hot reload entity (clears cache, will reimport on next access).
282
288
 
283
- if not entity_info:
284
- # Sanitize error message - only return safe, user-friendly errors
285
- logger.error(f"Failed to fetch or validate entity from {url}: {error_msg}")
286
- safe_error = error_msg if error_msg else "Failed to fetch or validate entity"
287
- raise HTTPException(status_code=400, detail=safe_error)
288
-
289
- logger.info(f"Successfully added entity: {entity_info.id}")
290
- return {"success": True, "entity": entity_info.model_dump()}
291
-
292
- except HTTPException:
293
- raise
294
- except Exception as e:
295
- logger.error(f"Error adding entity: {e}", exc_info=True)
296
- # Don't expose internal error details to client
297
- raise HTTPException(
298
- status_code=500, detail="An unexpected error occurred while adding the entity"
299
- ) from e
300
-
301
- @app.delete("/v1/entities/{entity_id}")
302
- async def remove_entity(entity_id: str) -> dict[str, Any]:
303
- """Remove entity by ID."""
289
+ This enables hot reload during development - edit entity code, call this endpoint,
290
+ and the next execution will use the updated code without server restart.
291
+ """
304
292
  try:
305
293
  executor = await self._ensure_executor()
306
294
 
307
- # Cleanup entity resources before removal
308
- try:
309
- entity_obj = executor.entity_discovery.get_entity_object(entity_id)
310
- if entity_obj and hasattr(entity_obj, "chat_client"):
311
- client = entity_obj.chat_client
312
- if hasattr(client, "close") and callable(client.close):
313
- if inspect.iscoroutinefunction(client.close):
314
- await client.close()
315
- else:
316
- client.close()
317
- logger.info(f"Closed client for entity: {entity_id}")
318
- except Exception as e:
319
- logger.warning(f"Error closing entity {entity_id} during removal: {e}")
295
+ # Check if entity exists
296
+ entity_info = executor.get_entity_info(entity_id)
297
+ if not entity_info:
298
+ raise HTTPException(status_code=404, detail=f"Entity {entity_id} not found")
320
299
 
321
- # Remove entity from registry
322
- success = executor.entity_discovery.remove_remote_entity(entity_id)
300
+ # Invalidate cache
301
+ executor.entity_discovery.invalidate_entity(entity_id)
323
302
 
324
- if success:
325
- return {"success": True}
326
- raise HTTPException(status_code=404, detail="Entity not found or cannot be removed")
303
+ return {
304
+ "success": True,
305
+ "message": f"Entity '{entity_id}' cache cleared. Will reload on next access.",
306
+ }
327
307
 
328
308
  except HTTPException:
329
309
  raise
330
310
  except Exception as e:
331
- logger.error(f"Error removing entity {entity_id}: {e}")
332
- raise HTTPException(status_code=500, detail=f"Failed to remove entity: {e!s}") from e
311
+ logger.error(f"Error reloading entity {entity_id}: {e}")
312
+ raise HTTPException(status_code=500, detail=f"Failed to reload entity: {e!s}") from e
333
313
 
334
314
  @app.post("/v1/responses")
335
315
  async def create_response(request: AgentFrameworkRequest, raw_request: Request) -> Any:
@@ -374,112 +354,161 @@ class DevServer:
374
354
  error = OpenAIError.create(f"Execution failed: {e!s}")
375
355
  return JSONResponse(status_code=500, content=error.to_dict())
376
356
 
377
- @app.post("/v1/threads")
378
- async def create_thread(request_data: dict[str, Any]) -> dict[str, Any]:
379
- """Create a new thread for an agent."""
357
+ # ========================================
358
+ # OpenAI Conversations API (Standard)
359
+ # ========================================
360
+
361
+ @app.post("/v1/conversations")
362
+ async def create_conversation(request_data: dict[str, Any]) -> dict[str, Any]:
363
+ """Create a new conversation - OpenAI standard."""
380
364
  try:
381
- agent_id = request_data.get("agent_id")
382
- if not agent_id:
383
- raise HTTPException(status_code=400, detail="agent_id is required")
365
+ metadata = request_data.get("metadata")
366
+ executor = await self._ensure_executor()
367
+ conversation = executor.conversation_store.create_conversation(metadata=metadata)
368
+ return conversation.model_dump()
369
+ except HTTPException:
370
+ raise
371
+ except Exception as e:
372
+ logger.error(f"Error creating conversation: {e}")
373
+ raise HTTPException(status_code=500, detail=f"Failed to create conversation: {e!s}") from e
384
374
 
375
+ @app.get("/v1/conversations")
376
+ async def list_conversations(agent_id: str | None = None) -> dict[str, Any]:
377
+ """List conversations, optionally filtered by agent_id."""
378
+ try:
385
379
  executor = await self._ensure_executor()
386
- thread_id = executor.create_thread(agent_id)
380
+
381
+ if agent_id:
382
+ # Filter by agent_id metadata
383
+ conversations = executor.conversation_store.list_conversations_by_metadata({"agent_id": agent_id})
384
+ else:
385
+ # Return all conversations (for InMemoryStore, list all)
386
+ # Note: This assumes list_conversations_by_metadata({}) returns all
387
+ conversations = executor.conversation_store.list_conversations_by_metadata({})
387
388
 
388
389
  return {
389
- "id": thread_id,
390
- "object": "thread",
391
- "created_at": int(__import__("time").time()),
392
- "metadata": {"agent_id": agent_id},
390
+ "object": "list",
391
+ "data": [conv.model_dump() for conv in conversations],
392
+ "has_more": False,
393
393
  }
394
394
  except HTTPException:
395
395
  raise
396
396
  except Exception as e:
397
- logger.error(f"Error creating thread: {e}")
398
- raise HTTPException(status_code=500, detail=f"Failed to create thread: {e!s}") from e
397
+ logger.error(f"Error listing conversations: {e}")
398
+ raise HTTPException(status_code=500, detail=f"Failed to list conversations: {e!s}") from e
399
399
 
400
- @app.get("/v1/threads")
401
- async def list_threads(agent_id: str) -> dict[str, Any]:
402
- """List threads for an agent."""
400
+ @app.get("/v1/conversations/{conversation_id}")
401
+ async def retrieve_conversation(conversation_id: str) -> dict[str, Any]:
402
+ """Get conversation - OpenAI standard."""
403
403
  try:
404
404
  executor = await self._ensure_executor()
405
- thread_ids = executor.list_threads_for_agent(agent_id)
406
-
407
- # Convert thread IDs to thread objects
408
- threads = []
409
- for thread_id in thread_ids:
410
- threads.append({"id": thread_id, "object": "thread", "agent_id": agent_id})
411
-
412
- return {"object": "list", "data": threads}
405
+ conversation = executor.conversation_store.get_conversation(conversation_id)
406
+ if not conversation:
407
+ raise HTTPException(status_code=404, detail="Conversation not found")
408
+ return conversation.model_dump()
409
+ except HTTPException:
410
+ raise
413
411
  except Exception as e:
414
- logger.error(f"Error listing threads: {e}")
415
- raise HTTPException(status_code=500, detail=f"Failed to list threads: {e!s}") from e
412
+ logger.error(f"Error getting conversation {conversation_id}: {e}")
413
+ raise HTTPException(status_code=500, detail=f"Failed to get conversation: {e!s}") from e
416
414
 
417
- @app.get("/v1/threads/{thread_id}")
418
- async def get_thread(thread_id: str) -> dict[str, Any]:
419
- """Get thread information."""
415
+ @app.post("/v1/conversations/{conversation_id}")
416
+ async def update_conversation(conversation_id: str, request_data: dict[str, Any]) -> dict[str, Any]:
417
+ """Update conversation metadata - OpenAI standard."""
420
418
  try:
421
419
  executor = await self._ensure_executor()
422
-
423
- # Check if thread exists
424
- thread = executor.get_thread(thread_id)
425
- if not thread:
426
- raise HTTPException(status_code=404, detail="Thread not found")
427
-
428
- # Get the agent that owns this thread
429
- agent_id = executor.get_agent_for_thread(thread_id)
430
-
431
- return {"id": thread_id, "object": "thread", "agent_id": agent_id}
420
+ metadata = request_data.get("metadata", {})
421
+ conversation = executor.conversation_store.update_conversation(conversation_id, metadata=metadata)
422
+ return conversation.model_dump()
423
+ except ValueError as e:
424
+ raise HTTPException(status_code=404, detail=str(e)) from e
432
425
  except HTTPException:
433
426
  raise
434
427
  except Exception as e:
435
- logger.error(f"Error getting thread {thread_id}: {e}")
436
- raise HTTPException(status_code=500, detail=f"Failed to get thread: {e!s}") from e
428
+ logger.error(f"Error updating conversation {conversation_id}: {e}")
429
+ raise HTTPException(status_code=500, detail=f"Failed to update conversation: {e!s}") from e
437
430
 
438
- @app.delete("/v1/threads/{thread_id}")
439
- async def delete_thread(thread_id: str) -> dict[str, Any]:
440
- """Delete a thread."""
431
+ @app.delete("/v1/conversations/{conversation_id}")
432
+ async def delete_conversation(conversation_id: str) -> dict[str, Any]:
433
+ """Delete conversation - OpenAI standard."""
441
434
  try:
442
435
  executor = await self._ensure_executor()
443
- success = executor.delete_thread(thread_id)
444
-
445
- if not success:
446
- raise HTTPException(status_code=404, detail="Thread not found")
447
-
448
- return {"id": thread_id, "object": "thread.deleted", "deleted": True}
436
+ result = executor.conversation_store.delete_conversation(conversation_id)
437
+ return result.model_dump()
438
+ except ValueError as e:
439
+ raise HTTPException(status_code=404, detail=str(e)) from e
449
440
  except HTTPException:
450
441
  raise
451
442
  except Exception as e:
452
- logger.error(f"Error deleting thread {thread_id}: {e}")
453
- raise HTTPException(status_code=500, detail=f"Failed to delete thread: {e!s}") from e
443
+ logger.error(f"Error deleting conversation {conversation_id}: {e}")
444
+ raise HTTPException(status_code=500, detail=f"Failed to delete conversation: {e!s}") from e
454
445
 
455
- @app.get("/v1/threads/{thread_id}/messages")
456
- async def get_thread_messages(thread_id: str) -> dict[str, Any]:
457
- """Get messages from a thread."""
446
+ @app.post("/v1/conversations/{conversation_id}/items")
447
+ async def create_conversation_items(conversation_id: str, request_data: dict[str, Any]) -> dict[str, Any]:
448
+ """Add items to conversation - OpenAI standard."""
458
449
  try:
459
450
  executor = await self._ensure_executor()
451
+ items = request_data.get("items", [])
452
+ conv_items = await executor.conversation_store.add_items(conversation_id, items=items)
453
+ return {"object": "list", "data": [item.model_dump() for item in conv_items]}
454
+ except ValueError as e:
455
+ raise HTTPException(status_code=404, detail=str(e)) from e
456
+ except HTTPException:
457
+ raise
458
+ except Exception as e:
459
+ logger.error(f"Error adding items to conversation {conversation_id}: {e}")
460
+ raise HTTPException(status_code=500, detail=f"Failed to add items: {e!s}") from e
461
+
462
+ @app.get("/v1/conversations/{conversation_id}/items")
463
+ async def list_conversation_items(
464
+ conversation_id: str, limit: int = 100, after: str | None = None, order: str = "asc"
465
+ ) -> dict[str, Any]:
466
+ """List conversation items - OpenAI standard."""
467
+ try:
468
+ executor = await self._ensure_executor()
469
+ items, has_more = await executor.conversation_store.list_items(
470
+ conversation_id, limit=limit, after=after, order=order
471
+ )
472
+ return {
473
+ "object": "list",
474
+ "data": [item.model_dump() for item in items],
475
+ "has_more": has_more,
476
+ }
477
+ except ValueError as e:
478
+ raise HTTPException(status_code=404, detail=str(e)) from e
479
+ except HTTPException:
480
+ raise
481
+ except Exception as e:
482
+ logger.error(f"Error listing items for conversation {conversation_id}: {e}")
483
+ raise HTTPException(status_code=500, detail=f"Failed to list items: {e!s}") from e
460
484
 
461
- # Check if thread exists
462
- thread = executor.get_thread(thread_id)
463
- if not thread:
464
- raise HTTPException(status_code=404, detail="Thread not found")
465
-
466
- # Get messages from thread
467
- messages = await executor.get_thread_messages(thread_id)
468
-
469
- return {"object": "list", "data": messages, "thread_id": thread_id}
485
+ @app.get("/v1/conversations/{conversation_id}/items/{item_id}")
486
+ async def retrieve_conversation_item(conversation_id: str, item_id: str) -> dict[str, Any]:
487
+ """Get specific conversation item - OpenAI standard."""
488
+ try:
489
+ executor = await self._ensure_executor()
490
+ item = executor.conversation_store.get_item(conversation_id, item_id)
491
+ if not item:
492
+ raise HTTPException(status_code=404, detail="Item not found")
493
+ return item.model_dump()
470
494
  except HTTPException:
471
495
  raise
472
496
  except Exception as e:
473
- logger.error(f"Error getting messages for thread {thread_id}: {e}")
474
- raise HTTPException(status_code=500, detail=f"Failed to get thread messages: {e!s}") from e
497
+ logger.error(f"Error getting item {item_id} from conversation {conversation_id}: {e}")
498
+ raise HTTPException(status_code=500, detail=f"Failed to get item: {e!s}") from e
475
499
 
476
500
  async def _stream_execution(
477
501
  self, executor: AgentFrameworkExecutor, request: AgentFrameworkRequest
478
502
  ) -> AsyncGenerator[str, None]:
479
503
  """Stream execution directly through executor."""
480
504
  try:
481
- # Direct call to executor - simple and clean
505
+ # Collect events for final response.completed event
506
+ events = []
507
+
508
+ # Stream all events
482
509
  async for event in executor.execute_streaming(request):
510
+ events.append(event)
511
+
483
512
  # IMPORTANT: Check model_dump_json FIRST because to_json() can have newlines (pretty-printing)
484
513
  # which breaks SSE format. model_dump_json() returns single-line JSON.
485
514
  if hasattr(event, "model_dump_json"):
@@ -497,6 +526,17 @@ class DevServer:
497
526
  payload = json.dumps(str(event))
498
527
  yield f"data: {payload}\n\n"
499
528
 
529
+ # Aggregate to final response and emit response.completed event (OpenAI standard)
530
+ from .models import ResponseCompletedEvent
531
+
532
+ final_response = await executor.message_mapper.aggregate_to_response(events, request)
533
+ completed_event = ResponseCompletedEvent(
534
+ type="response.completed",
535
+ response=final_response,
536
+ sequence_number=len(events),
537
+ )
538
+ yield f"data: {completed_event.model_dump_json()}\n\n"
539
+
500
540
  # Send final done event
501
541
  yield "data: [DONE]\n\n"
502
542
 
@@ -67,7 +67,7 @@ class SessionManager:
67
67
  logger.debug(f"Closed session: {session_id}")
68
68
 
69
69
  def add_request_record(
70
- self, session_id: str, entity_id: str, executor_name: str, request_input: Any, model: str
70
+ self, session_id: str, entity_id: str, executor_name: str, request_input: Any, model_id: str
71
71
  ) -> str:
72
72
  """Add a request record to a session.
73
73
 
@@ -76,7 +76,7 @@ class SessionManager:
76
76
  entity_id: ID of the entity being executed
77
77
  executor_name: Name of the executor
78
78
  request_input: Input for the request
79
- model: Model name
79
+ model_id: Model name
80
80
 
81
81
  Returns:
82
82
  Request ID
@@ -91,7 +91,7 @@ class SessionManager:
91
91
  "entity_id": entity_id,
92
92
  "executor": executor_name,
93
93
  "input": request_input,
94
- "model": model,
94
+ "model_id": model_id,
95
95
  "stream": True,
96
96
  }
97
97
  session["requests"].append(request_record)