agent-framework-devui 1.0.0b251001__py3-none-any.whl → 1.0.0b251016__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of agent-framework-devui might be problematic.

@@ -0,0 +1,548 @@
+ # Copyright (c) Microsoft. All rights reserved.
+
+ """Utility functions for DevUI."""
+
+ import inspect
+ import json
+ import logging
+ from dataclasses import fields, is_dataclass
+ from typing import Any, get_args, get_origin
+
+ logger = logging.getLogger(__name__)
+
+ # ============================================================================
+ # Agent Metadata Extraction
+ # ============================================================================
+
+
+ def extract_agent_metadata(entity_object: Any) -> dict[str, Any]:
+     """Extract agent-specific metadata from an entity object.
+
+     Args:
+         entity_object: Agent Framework agent object
+
+     Returns:
+         Dictionary with agent metadata: instructions, model, chat_client_type,
+         context_providers, and middleware
+     """
+     metadata = {
+         "instructions": None,
+         "model": None,
+         "chat_client_type": None,
+         "context_providers": None,
+         "middleware": None,
+     }
+
+     # Try to get instructions
+     if hasattr(entity_object, "chat_options") and hasattr(entity_object.chat_options, "instructions"):
+         metadata["instructions"] = entity_object.chat_options.instructions
+
+     # Try to get model - check both chat_options and chat_client
+     if (
+         hasattr(entity_object, "chat_options")
+         and hasattr(entity_object.chat_options, "model_id")
+         and entity_object.chat_options.model_id
+     ):
+         metadata["model"] = entity_object.chat_options.model_id
+     elif hasattr(entity_object, "chat_client") and hasattr(entity_object.chat_client, "model_id"):
+         metadata["model"] = entity_object.chat_client.model_id
+
+     # Try to get chat client type
+     if hasattr(entity_object, "chat_client"):
+         metadata["chat_client_type"] = entity_object.chat_client.__class__.__name__
+
+     # Try to get context providers
+     if (
+         hasattr(entity_object, "context_provider")
+         and entity_object.context_provider
+         and hasattr(entity_object.context_provider, "__class__")
+     ):
+         metadata["context_providers"] = [entity_object.context_provider.__class__.__name__]  # type: ignore
+
+     # Try to get middleware
+     if hasattr(entity_object, "middleware") and entity_object.middleware:
+         middleware_list: list[str] = []
+         for m in entity_object.middleware:
+             # Try multiple ways to get a good name for middleware
+             if hasattr(m, "__name__"):  # Function or callable
+                 middleware_list.append(m.__name__)
+             elif hasattr(m, "__class__"):  # Class instance
+                 middleware_list.append(m.__class__.__name__)
+             else:
+                 middleware_list.append(str(m))
+         metadata["middleware"] = middleware_list  # type: ignore
+
+     return metadata
+
+
+ # ============================================================================
+ # Workflow Input Type Utilities
+ # ============================================================================
+
+
+ def extract_executor_message_types(executor: Any) -> list[Any]:
+     """Extract declared input types for the given executor.
+
+     Args:
+         executor: Workflow executor object
+
+     Returns:
+         List of message types that the executor accepts
+     """
+     message_types: list[Any] = []
+
+     try:
+         input_types = getattr(executor, "input_types", None)
+     except Exception as exc:  # pragma: no cover - defensive logging path
+         logger.debug(f"Failed to access executor input_types: {exc}")
+     else:
+         if input_types:
+             message_types = list(input_types)
+
+     if not message_types and hasattr(executor, "_handlers"):
+         try:
+             handlers = executor._handlers
+             if isinstance(handlers, dict):
+                 message_types = list(handlers.keys())
+         except Exception as exc:  # pragma: no cover - defensive logging path
+             logger.debug(f"Failed to read executor handlers: {exc}")
+
+     return message_types
+
+
+ def select_primary_input_type(message_types: list[Any]) -> Any | None:
+     """Choose the most user-friendly input type for workflow inputs.
+
+     Prefers str and dict types for better user experience.
+
+     Args:
+         message_types: List of possible message types
+
+     Returns:
+         Selected primary input type, or None if list is empty
+     """
+     if not message_types:
+         return None
+
+     preferred = (str, dict)
+
+     for candidate in preferred:
+         for message_type in message_types:
+             if message_type is candidate:
+                 return candidate
+             origin = get_origin(message_type)
+             if origin is candidate:
+                 return candidate
+
+     return message_types[0]
+
+
+ # ============================================================================
+ # Type System Utilities
+ # ============================================================================
+
+
+ def is_serialization_mixin(cls: type) -> bool:
+     """Check if class is a SerializationMixin subclass.
+
+     Args:
+         cls: Class to check
+
+     Returns:
+         True if class is a SerializationMixin subclass
+     """
+     try:
+         from agent_framework._serialization import SerializationMixin
+
+         return isinstance(cls, type) and issubclass(cls, SerializationMixin)
+     except ImportError:
+         return False
+
+
+ def _type_to_schema(type_hint: Any, field_name: str) -> dict[str, Any]:
+     """Convert a type hint to JSON schema.
+
+     Args:
+         type_hint: Type hint to convert
+         field_name: Name of the field (for documentation)
+
+     Returns:
+         JSON schema dict
+     """
+     type_str = str(type_hint)
+
+     # Handle None/Optional
+     if type_hint is type(None):
+         return {"type": "null"}
+
+     # Handle basic types
+     if type_hint is str or "str" in type_str:
+         return {"type": "string"}
+     if type_hint is int or "int" in type_str:
+         return {"type": "integer"}
+     if type_hint is float or "float" in type_str:
+         return {"type": "number"}
+     if type_hint is bool or "bool" in type_str:
+         return {"type": "boolean"}
+
+     # Handle Literal types (for enum-like values)
+     if "Literal" in type_str:
+         origin = get_origin(type_hint)
+         if origin is not None:
+             args = get_args(type_hint)
+             if args:
+                 return {"type": "string", "enum": list(args)}
+
+     # Handle Union/Optional
+     if "Union" in type_str or "Optional" in type_str:
+         origin = get_origin(type_hint)
+         if origin is not None:
+             args = get_args(type_hint)
+             # Filter out None type
+             non_none_args = [arg for arg in args if arg is not type(None)]
+             if len(non_none_args) == 1:
+                 return _type_to_schema(non_none_args[0], field_name)
+             # Multiple types - pick first non-None
+             if non_none_args:
+                 return _type_to_schema(non_none_args[0], field_name)
+
+     # Handle collections
+     if "list" in type_str or "List" in type_str or "Sequence" in type_str:
+         origin = get_origin(type_hint)
+         if origin is not None:
+             args = get_args(type_hint)
+             if args:
+                 items_schema = _type_to_schema(args[0], field_name)
+                 return {"type": "array", "items": items_schema}
+         return {"type": "array"}
+
+     if "dict" in type_str or "Dict" in type_str or "Mapping" in type_str:
+         return {"type": "object"}
+
+     # Default fallback
+     return {"type": "string", "description": f"Type: {type_hint}"}
+
+
+ def generate_schema_from_serialization_mixin(cls: type[Any]) -> dict[str, Any]:
+     """Generate JSON schema from SerializationMixin class.
+
+     Introspects the __init__ signature to extract parameter types and defaults.
+
+     Args:
+         cls: SerializationMixin subclass
+
+     Returns:
+         JSON schema dict
+     """
+     sig = inspect.signature(cls)
+
+     # Get type hints
+     try:
+         from typing import get_type_hints
+
+         type_hints = get_type_hints(cls)
+     except Exception:
+         type_hints = {}
+
+     properties: dict[str, Any] = {}
+     required: list[str] = []
+
+     for param_name, param in sig.parameters.items():
+         if param_name in ("self", "kwargs"):
+             continue
+
+         # Get type annotation
+         param_type = type_hints.get(param_name, str)
+
+         # Generate schema for this parameter
+         param_schema = _type_to_schema(param_type, param_name)
+         properties[param_name] = param_schema
+
+         # Check if required (no default value, not VAR_KEYWORD)
+         if param.default == inspect.Parameter.empty and param.kind != inspect.Parameter.VAR_KEYWORD:
+             required.append(param_name)
+
+     schema: dict[str, Any] = {"type": "object", "properties": properties}
+
+     if required:
+         schema["required"] = required
+
+     return schema
+
+
+ def generate_schema_from_dataclass(cls: type[Any]) -> dict[str, Any]:
+     """Generate JSON schema from dataclass.
+
+     Args:
+         cls: Dataclass type
+
+     Returns:
+         JSON schema dict
+     """
+     if not is_dataclass(cls):
+         return {"type": "object"}
+
+     properties: dict[str, Any] = {}
+     required: list[str] = []
+
+     for field in fields(cls):
+         # Generate schema for field type
+         field_schema = _type_to_schema(field.type, field.name)
+         properties[field.name] = field_schema
+
+         # Check if required (no default value)
+         if field.default == field.default_factory:  # No default
+             required.append(field.name)
+
+     schema: dict[str, Any] = {"type": "object", "properties": properties}
+
+     if required:
+         schema["required"] = required
+
+     return schema
+
+
+ def generate_input_schema(input_type: type) -> dict[str, Any]:
+     """Generate JSON schema for workflow input type.
+
+     Supports multiple input types in priority order:
+     1. Built-in types (str, dict, int, etc.)
+     2. Pydantic models (via model_json_schema)
+     3. SerializationMixin classes (via __init__ introspection)
+     4. Dataclasses (via fields introspection)
+     5. Fallback to string
+
+     Args:
+         input_type: Input type to generate schema for
+
+     Returns:
+         JSON schema dict
+     """
+     # 1. Built-in types
+     if input_type is str:
+         return {"type": "string"}
+     if input_type is dict:
+         return {"type": "object"}
+     if input_type is int:
+         return {"type": "integer"}
+     if input_type is float:
+         return {"type": "number"}
+     if input_type is bool:
+         return {"type": "boolean"}
+
+     # 2. Pydantic models (legacy support)
+     if hasattr(input_type, "model_json_schema"):
+         return input_type.model_json_schema()  # type: ignore
+
+     # 3. SerializationMixin classes (ChatMessage, etc.)
+     if is_serialization_mixin(input_type):
+         return generate_schema_from_serialization_mixin(input_type)
+
+     # 4. Dataclasses
+     if is_dataclass(input_type):
+         return generate_schema_from_dataclass(input_type)
+
+     # 5. Fallback to string
+     type_name = getattr(input_type, "__name__", str(input_type))
+     return {"type": "string", "description": f"Input type: {type_name}"}
+
+
+ # ============================================================================
+ # Input Parsing Utilities
+ # ============================================================================
+
+
+ def parse_input_for_type(input_data: Any, target_type: type) -> Any:
+     """Parse input data to match the target type.
+
+     Handles conversion from raw input (string, dict) to the expected type:
+     - Built-in types: direct conversion
+     - Pydantic models: use model_validate or model_validate_json
+     - SerializationMixin: use from_dict or construct from string
+     - Dataclasses: construct from dict
+
+     Args:
+         input_data: Raw input data (string, dict, or already correct type)
+         target_type: Expected type for the input
+
+     Returns:
+         Parsed input matching target_type, or original input if parsing fails
+     """
+     # If already correct type, return as-is
+     if isinstance(input_data, target_type):
+         return input_data
+
+     # Handle string input
+     if isinstance(input_data, str):
+         return _parse_string_input(input_data, target_type)
+
+     # Handle dict input
+     if isinstance(input_data, dict):
+         return _parse_dict_input(input_data, target_type)
+
+     # Fallback: return original
+     return input_data
+
+
+ def _parse_string_input(input_str: str, target_type: type) -> Any:
+     """Parse string input to target type.
+
+     Args:
+         input_str: Input string
+         target_type: Target type
+
+     Returns:
+         Parsed input or original string
+     """
+     # Built-in types
+     if target_type is str:
+         return input_str
+     if target_type is int:
+         try:
+             return int(input_str)
+         except ValueError:
+             return input_str
+     elif target_type is float:
+         try:
+             return float(input_str)
+         except ValueError:
+             return input_str
+     elif target_type is bool:
+         return input_str.lower() in ("true", "1", "yes")
+
+     # Pydantic models
+     if hasattr(target_type, "model_validate_json"):
+         try:
+             # Try parsing as JSON first
+             if input_str.strip().startswith("{"):
+                 return target_type.model_validate_json(input_str)  # type: ignore
+
+             # Try common field names with the string value
+             common_fields = ["text", "message", "content", "input", "data"]
+             for field in common_fields:
+                 try:
+                     return target_type(**{field: input_str})  # type: ignore
+                 except Exception as e:
+                     logger.debug(f"Failed to parse string input with field '{field}': {e}")
+                     continue
+         except Exception as e:
+             logger.debug(f"Failed to parse string as Pydantic model: {e}")
+
+     # SerializationMixin (like ChatMessage)
+     if is_serialization_mixin(target_type):
+         try:
+             # Try parsing as JSON dict first
+             if input_str.strip().startswith("{"):
+                 data = json.loads(input_str)
+                 if hasattr(target_type, "from_dict"):
+                     return target_type.from_dict(data)  # type: ignore
+                 return target_type(**data)  # type: ignore
+
+             # For ChatMessage specifically: create from text
+             # Try common field patterns
+             common_fields = ["text", "message", "content"]
+             sig = inspect.signature(target_type)
+             params = list(sig.parameters.keys())
+
+             # If it has 'text' param, use it
+             if "text" in params:
+                 try:
+                     return target_type(role="user", text=input_str)  # type: ignore
+                 except Exception as e:
+                     logger.debug(f"Failed to create SerializationMixin with text field: {e}")
+
+             # Try other common fields
+             for field in common_fields:
+                 if field in params:
+                     try:
+                         return target_type(**{field: input_str})  # type: ignore
+                     except Exception as e:
+                         logger.debug(f"Failed to create SerializationMixin with field '{field}': {e}")
+                         continue
+         except Exception as e:
+             logger.debug(f"Failed to parse string as SerializationMixin: {e}")
+
+     # Dataclasses
+     if is_dataclass(target_type):
+         try:
+             # Try parsing as JSON
+             if input_str.strip().startswith("{"):
+                 data = json.loads(input_str)
+                 return target_type(**data)  # type: ignore
+
+             # Try common field names
+             common_fields = ["text", "message", "content", "input", "data"]
+             for field in common_fields:
+                 try:
+                     return target_type(**{field: input_str})  # type: ignore
+                 except Exception as e:
+                     logger.debug(f"Failed to create dataclass with field '{field}': {e}")
+                     continue
+         except Exception as e:
+             logger.debug(f"Failed to parse string as dataclass: {e}")
+
+     # Fallback: return original string
+     return input_str
+
+
+ def _parse_dict_input(input_dict: dict[str, Any], target_type: type) -> Any:
+     """Parse dict input to target type.
+
+     Args:
+         input_dict: Input dictionary
+         target_type: Target type
+
+     Returns:
+         Parsed input or original dict
+     """
+     # Handle primitive types - extract from common field names
+     if target_type in (str, int, float, bool):
+         try:
+             # If it's already the right type, return as-is
+             if isinstance(input_dict, target_type):
+                 return input_dict
+
+             # Try "input" field first (common for workflow inputs)
+             if "input" in input_dict:
+                 return target_type(input_dict["input"])  # type: ignore
+
+             # If single-key dict, extract the value
+             if len(input_dict) == 1:
+                 value = next(iter(input_dict.values()))
+                 return target_type(value)  # type: ignore
+
+             # Otherwise, return as-is
+             return input_dict
+         except (ValueError, TypeError) as e:
+             logger.debug(f"Failed to convert dict to {target_type}: {e}")
+             return input_dict
+
+     # If target is dict, return as-is
+     if target_type is dict:
+         return input_dict
+
+     # Pydantic models
+     if hasattr(target_type, "model_validate"):
+         try:
+             return target_type.model_validate(input_dict)  # type: ignore
+         except Exception as e:
+             logger.debug(f"Failed to validate dict as Pydantic model: {e}")
+
+     # SerializationMixin
+     if is_serialization_mixin(target_type):
+         try:
+             if hasattr(target_type, "from_dict"):
+                 return target_type.from_dict(input_dict)  # type: ignore
+             return target_type(**input_dict)  # type: ignore
+         except Exception as e:
+             logger.debug(f"Failed to parse dict as SerializationMixin: {e}")
+
+     # Dataclasses
+     if is_dataclass(target_type):
+         try:
+             return target_type(**input_dict)  # type: ignore
+         except Exception as e:
+             logger.debug(f"Failed to parse dict as dataclass: {e}")
+
+     # Fallback: return original dict
+     return input_dict
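Taken together, these utilities turn a workflow's declared input type into a JSON schema for the DevUI form and convert the submitted value back into that type. The sketch below shows how a caller might use them; the TicketRequest dataclass and the agent_framework_devui._utils import path are illustrative assumptions, not part of the package.

from dataclasses import dataclass

from agent_framework_devui._utils import generate_input_schema, parse_input_for_type  # assumed module path


@dataclass
class TicketRequest:
    # Hypothetical workflow input type, used only for this example.
    text: str
    priority: int = 1


# Falls through to the dataclass branch: an object schema with "text" required and "priority" optional.
schema = generate_input_schema(TicketRequest)

# A dict submitted from the DevUI form is routed through _parse_dict_input to the dataclass constructor.
request = parse_input_for_type({"text": "Reset my password", "priority": 2}, TicketRequest)

# A bare string also works: _parse_string_input tries common field names such as "text".
request_from_text = parse_input_for_type("Reset my password", TicketRequest)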
@@ -4,11 +4,18 @@

  # Import discovery models
  # Import all OpenAI types directly from the openai package
+ from openai.types.conversations import Conversation, ConversationDeletedResource
+ from openai.types.conversations.conversation_item import ConversationItem
  from openai.types.responses import (
      Response,
+     ResponseCompletedEvent,
      ResponseErrorEvent,
      ResponseFunctionCallArgumentsDeltaEvent,
+     ResponseFunctionToolCall,
+     ResponseFunctionToolCallOutputItem,
      ResponseInputParam,
+     ResponseOutputItemAddedEvent,
+     ResponseOutputItemDoneEvent,
      ResponseOutputMessage,
      ResponseOutputText,
      ResponseReasoningTextDeltaEvent,
@@ -25,14 +32,9 @@ from ._openai_custom import (
      AgentFrameworkRequest,
      OpenAIError,
      ResponseFunctionResultComplete,
-     ResponseFunctionResultDelta,
      ResponseTraceEvent,
      ResponseTraceEventComplete,
-     ResponseTraceEventDelta,
-     ResponseUsageEventComplete,
-     ResponseUsageEventDelta,
      ResponseWorkflowEventComplete,
-     ResponseWorkflowEventDelta,
  )

  # Type alias for compatibility
@@ -41,6 +43,9 @@ OpenAIResponse = Response
  # Export all types for easy importing
  __all__ = [
      "AgentFrameworkRequest",
+     "Conversation",
+     "ConversationDeletedResource",
+     "ConversationItem",
      "DiscoveryResponse",
      "EntityInfo",
      "InputTokensDetails",
@@ -49,11 +54,15 @@ __all__ = [
      "OpenAIResponse",
      "OutputTokensDetails",
      "Response",
+     "ResponseCompletedEvent",
      "ResponseErrorEvent",
      "ResponseFunctionCallArgumentsDeltaEvent",
      "ResponseFunctionResultComplete",
-     "ResponseFunctionResultDelta",
+     "ResponseFunctionToolCall",
+     "ResponseFunctionToolCallOutputItem",
      "ResponseInputParam",
+     "ResponseOutputItemAddedEvent",
+     "ResponseOutputItemDoneEvent",
      "ResponseOutputMessage",
      "ResponseOutputText",
      "ResponseReasoningTextDeltaEvent",
@@ -61,12 +70,8 @@ __all__ = [
      "ResponseTextDeltaEvent",
      "ResponseTraceEvent",
      "ResponseTraceEventComplete",
-     "ResponseTraceEventDelta",
      "ResponseUsage",
-     "ResponseUsageEventComplete",
-     "ResponseUsageEventDelta",
      "ResponseWorkflowEventComplete",
-     "ResponseWorkflowEventDelta",
      "ResponsesModel",
      "ToolParam",
  ]
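With these re-exports, downstream code can take the newly added OpenAI conversation and output-item types from the DevUI models package instead of importing them from openai directly. An illustrative import follows; the agent_framework_devui.models path is an assumption based on the package-relative ._openai_custom import above.

# Assumed top-level module path; the diff shows only package-relative imports.
from agent_framework_devui.models import (
    Conversation,
    ConversationItem,
    ResponseCompletedEvent,
    ResponseOutputItemAddedEvent,
)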
@@ -31,12 +31,18 @@ class EntityInfo(BaseModel):
      metadata: dict[str, Any] = Field(default_factory=dict)

      # Source information
-     source: str = "directory"  # "directory", "in_memory", "remote_gallery"
-     original_url: str | None = None
+     source: str = "directory"  # "directory" or "in_memory"

      # Environment variable requirements
      required_env_vars: list[EnvVarRequirement] | None = None

+     # Agent-specific fields (optional, populated when available)
+     instructions: str | None = None
+     model_id: str | None = None
+     chat_client_type: str | None = None
+     context_providers: list[str] | None = None
+     middleware: list[str] | None = None
+
      # Workflow-specific fields (populated only for detailed info requests)
      executors: list[str] | None = None
      workflow_dump: dict[str, Any] | None = None
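The new optional fields on EntityInfo mirror the keys returned by extract_agent_metadata, so an agent's entity record can now carry its instructions, model and middleware inline. The helper below is a sketch of that mapping, not code from the package; the import path and the helper itself are assumptions.

from typing import Any

from agent_framework_devui._utils import extract_agent_metadata  # assumed module path


def agent_entity_fields(agent: Any) -> dict[str, Any]:
    """Illustrative helper: map extract_agent_metadata output onto the new EntityInfo fields."""
    metadata = extract_agent_metadata(agent)
    return {
        "instructions": metadata["instructions"],
        "model_id": metadata["model"],  # the metadata key "model" feeds the model_id field
        "chat_client_type": metadata["chat_client_type"],
        "context_providers": metadata["context_providers"],
        "middleware": metadata["middleware"],
    }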