agentfield-0.1.22rc2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. agentfield/__init__.py +66 -0
  2. agentfield/agent.py +3569 -0
  3. agentfield/agent_ai.py +1125 -0
  4. agentfield/agent_cli.py +386 -0
  5. agentfield/agent_field_handler.py +494 -0
  6. agentfield/agent_mcp.py +534 -0
  7. agentfield/agent_registry.py +29 -0
  8. agentfield/agent_server.py +1185 -0
  9. agentfield/agent_utils.py +269 -0
  10. agentfield/agent_workflow.py +323 -0
  11. agentfield/async_config.py +278 -0
  12. agentfield/async_execution_manager.py +1227 -0
  13. agentfield/client.py +1447 -0
  14. agentfield/connection_manager.py +280 -0
  15. agentfield/decorators.py +527 -0
  16. agentfield/did_manager.py +337 -0
  17. agentfield/dynamic_skills.py +304 -0
  18. agentfield/execution_context.py +255 -0
  19. agentfield/execution_state.py +453 -0
  20. agentfield/http_connection_manager.py +429 -0
  21. agentfield/litellm_adapters.py +140 -0
  22. agentfield/logger.py +249 -0
  23. agentfield/mcp_client.py +204 -0
  24. agentfield/mcp_manager.py +340 -0
  25. agentfield/mcp_stdio_bridge.py +550 -0
  26. agentfield/memory.py +723 -0
  27. agentfield/memory_events.py +489 -0
  28. agentfield/multimodal.py +173 -0
  29. agentfield/multimodal_response.py +403 -0
  30. agentfield/pydantic_utils.py +227 -0
  31. agentfield/rate_limiter.py +280 -0
  32. agentfield/result_cache.py +441 -0
  33. agentfield/router.py +190 -0
  34. agentfield/status.py +70 -0
  35. agentfield/types.py +710 -0
  36. agentfield/utils.py +26 -0
  37. agentfield/vc_generator.py +464 -0
  38. agentfield/vision.py +198 -0
  39. agentfield-0.1.22rc2.dist-info/METADATA +102 -0
  40. agentfield-0.1.22rc2.dist-info/RECORD +42 -0
  41. agentfield-0.1.22rc2.dist-info/WHEEL +5 -0
  42. agentfield-0.1.22rc2.dist-info/top_level.txt +1 -0
agentfield/decorators.py
@@ -0,0 +1,527 @@
+"""
+Enhanced decorators for AgentField SDK with automatic workflow tracking.
+Provides always-on workflow tracking for reasoner calls.
+"""
+
+import asyncio
+import functools
+import inspect
+import time
+from typing import Any, Callable, Dict, List, Optional, Union
+
+from agentfield.logger import log_warn
+
+from .execution_context import (
+    ExecutionContext,
+    get_current_context,
+    set_execution_context,
+    reset_execution_context,
+)
+from .agent_registry import get_current_agent_instance
+from .types import ReasonerDefinition
+from .pydantic_utils import convert_function_args, should_convert_args
+from pydantic import ValidationError
+
+
+def reasoner(
+    func=None,
+    *,
+    path: Optional[str] = None,
+    tags: Optional[List[str]] = None,
+    description: Optional[str] = None,
+    track_workflow: bool = True,
+    **kwargs,
+):
+    """
+    Enhanced reasoner decorator with automatic workflow tracking and full feature support.
+
+    Supports all of the following forms:
+        @reasoner                        # Default: track_workflow=True
+        @reasoner(track_workflow=False)  # Explicit: disable tracking
+        @reasoner(path="/custom/path")   # Custom endpoint path
+        @reasoner(tags=["ai", "nlp"])    # Tags for organization
+        @reasoner(description="...")     # Custom description
+
+    Args:
+        func: The function to decorate (when used without parentheses)
+        path: Custom API endpoint path for this reasoner
+        tags: List of tags for organizing and categorizing reasoners
+        description: Description of what this reasoner does
+        track_workflow: Whether to enable automatic workflow tracking (default: True)
+        **kwargs: Additional metadata to store with the reasoner
+
+    Returns:
+        Decorated function with workflow tracking capabilities and full metadata support
+    """
+
+    def decorator(f: Callable) -> Callable:
+        @functools.wraps(f)
+        async def wrapper(*args, **kwargs):
+            if track_workflow:
+                # Execute with automatic workflow tracking
+                return await _execute_with_tracking(f, *args, **kwargs)
+            else:
+                # Execute without tracking
+                if asyncio.iscoroutinefunction(f):
+                    return await f(*args, **kwargs)
+                else:
+                    return f(*args, **kwargs)
+
+        # Store comprehensive metadata on the function
+        wrapper._is_reasoner = True
+        wrapper._track_workflow = track_workflow
+        wrapper._reasoner_name = f.__name__
+        wrapper._original_func = f
+        wrapper._reasoner_path = path
+        wrapper._reasoner_tags = tags or []
+        wrapper._reasoner_description = (
+            description or f.__doc__ or f"Reasoner: {f.__name__}"
+        )
+
+        # Store any additional metadata
+        for key, value in kwargs.items():
+            setattr(wrapper, f"_reasoner_{key}", value)
+
+        return wrapper
+
+    # Handle both @reasoner and @reasoner(...) syntax
+    if func is None:
+        # Called as @reasoner(track_workflow=False) or @reasoner(path="/custom")
+        return decorator
+    else:
+        # Called as @reasoner (no parentheses)
+        return decorator(func)
+
+
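For reference, a minimal usage sketch of the decorator defined above. The decorated functions are hypothetical; only @reasoner and its parameters come from this module. Note that the wrapper is always a coroutine function, so callers must await the decorated reasoner even when the original function is synchronous.

    from agentfield.decorators import reasoner

    # Hypothetical reasoner with custom metadata; tracking is on by default.
    @reasoner(path="/summarize", tags=["nlp"], description="Summarize a document")
    async def summarize(text: str) -> dict:
        return {"summary": text[:200]}

    # Opt out of tracking for a hot path; the wrapper is still async,
    # so callers do: result = await normalize("  Hello  ")
    @reasoner(track_workflow=False)
    def normalize(text: str) -> str:
        return text.strip().lower()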
+async def _execute_with_tracking(func: Callable, *args, **kwargs) -> Any:
+    """
+    Core function that handles automatic workflow tracking for reasoner calls.
+
+    Args:
+        func: The reasoner function to execute
+        *args: Positional arguments for the function
+        **kwargs: Keyword arguments for the function
+
+    Returns:
+        The result of the function execution
+    """
+    # Get current execution context
+    current_context = get_current_context()
+
+    # Get agent instance (from context or global registry)
+    agent_instance = get_current_agent_instance()
+
+    if not agent_instance:
+        # No agent context - execute without tracking
+        if asyncio.iscoroutinefunction(func):
+            return await func(*args, **kwargs)
+        else:
+            return func(*args, **kwargs)
+
+    workflow_handler = getattr(agent_instance, "workflow_handler", None)
+    reasoner_name = getattr(func, "__name__", "reasoner")
+
+    # Generate execution metadata
+    # Build a child context when executing under an existing workflow; otherwise create a root context
+    if current_context:
+        execution_context = current_context.create_child_context()
+        execution_context.reasoner_name = reasoner_name
+        parent_context = current_context
+    else:
+        workflow_name = reasoner_name
+        if hasattr(agent_instance, "node_id"):
+            workflow_name = f"{agent_instance.node_id}_{workflow_name}"
+        execution_context = ExecutionContext.new_root(
+            agent_node_id=getattr(agent_instance, "node_id", "agent"),
+            reasoner_name=workflow_name,
+        )
+        execution_context.reasoner_name = reasoner_name
+        execution_context.agent_instance = agent_instance
+        parent_context = None
+
+    # Align run/session metadata with the parent context so registration inherits the workflow run
+    if parent_context:
+        execution_context.run_id = parent_context.run_id
+        execution_context.session_id = parent_context.session_id
+        execution_context.caller_did = parent_context.caller_did
+        execution_context.target_did = parent_context.target_did
+        execution_context.agent_node_did = parent_context.agent_node_did
+        execution_context.agent_instance = agent_instance
+
+    if workflow_handler is not None:
+        execution_context = await workflow_handler._ensure_execution_registered(
+            execution_context, reasoner_name, parent_context
+        )
+
+    previous_agent_context = getattr(agent_instance, "_current_execution_context", None)
+    agent_instance._current_execution_context = execution_context
+
+    client = getattr(agent_instance, "client", None)
+    previous_client_context = None
+    if client is not None:
+        previous_client_context = getattr(client, "_current_workflow_context", None)
+        client._current_workflow_context = execution_context
+
+    token = None
+    start_time = time.time()
+    parent_execution_id = parent_context.execution_id if parent_context else None
+
+    sig = inspect.signature(func)
+    call_kwargs = dict(kwargs or {})
+    input_data: Dict[str, Any] = {}
+
+    # Prepare DID-aware execution context so VC generation works for decorator-driven calls
+    did_execution_context = None
+    agent_has_did = getattr(agent_instance, "did_enabled", False) and getattr(
+        agent_instance, "did_manager", None
+    )
+    if agent_has_did:
+        try:
+            session_id = execution_context.session_id or execution_context.workflow_id
+            did_execution_context = agent_instance.did_manager.create_execution_context(
+                execution_context.execution_id,
+                execution_context.workflow_id,
+                session_id,
+                "agent",
+                reasoner_name,
+            )
+            if did_execution_context and hasattr(
+                agent_instance, "_populate_execution_context_with_did"
+            ):
+                agent_instance._populate_execution_context_with_did(
+                    execution_context, did_execution_context
+                )
+        except Exception as exc:  # pragma: no cover - diagnostic only
+            if getattr(agent_instance, "dev_mode", False):
+                log_warn(f"Failed to build DID context for {reasoner_name}: {exc}")
+            did_execution_context = None
+
+    def _maybe_generate_vc(
+        status: str, result_payload: Any, duration_ms: int, error_message: Optional[str]
+    ) -> None:
+        """Fire-and-forget VC generation so decorator parity matches HTTP path."""
+        generate_vc = getattr(agent_instance, "_generate_vc_async", None)
+        vc_generator = getattr(agent_instance, "vc_generator", None)
+        if (
+            did_execution_context
+            and callable(generate_vc)
+            and hasattr(agent_instance, "_should_generate_vc")
+            and agent_instance._should_generate_vc(
+                reasoner_name, getattr(agent_instance, "_reasoner_vc_overrides", {})
+            )
+        ):
+            asyncio.create_task(
+                generate_vc(
+                    vc_generator,
+                    did_execution_context,
+                    reasoner_name,
+                    input_data,
+                    result_payload,
+                    status=status,
+                    error_message=error_message,
+                    duration_ms=duration_ms,
+                )
+            )
+
+    try:
+        # Execute function with new context
+        token = set_execution_context(execution_context)
+
+        # Inject execution_context if the function accepts it
+        if "execution_context" in sig.parameters:
+            call_kwargs.setdefault("execution_context", execution_context)
+
+        # 🔥 NEW: Automatic Pydantic model conversion (FastAPI-like behavior)
+        try:
+            if should_convert_args(func):
+                converted_args, converted_kwargs = convert_function_args(
+                    func, args, call_kwargs
+                )
+                args = converted_args
+                call_kwargs = converted_kwargs
+        except ValidationError as e:
+            # Re-raise validation errors with context
+            raise ValidationError(
+                f"Pydantic validation failed for reasoner '{func.__name__}': {e}",
+                model=getattr(e, "model", None),
+            ) from e
+        except Exception as e:
+            # Log conversion errors but continue with original args for backward compatibility
+            if hasattr(agent_instance, "dev_mode") and agent_instance.dev_mode:
+                log_warn(f"Failed to convert arguments for {func.__name__}: {e}")
+
+        input_data = _build_input_payload(sig, args, call_kwargs)
+
+        start_payload = {
+            "reasoner_name": reasoner_name,
+            "args": list(args),
+            "kwargs": dict(call_kwargs),
+            "input_data": input_data,
+            "parent_execution_id": parent_execution_id,
+        }
+        await asyncio.create_task(
+            _send_workflow_start(
+                agent_instance,
+                execution_context,
+                start_payload,
+            )
+        )
+
+        if asyncio.iscoroutinefunction(func):
+            result = await func(*args, **call_kwargs)
+        else:
+            result = func(*args, **call_kwargs)
+
+        duration_ms = int((time.time() - start_time) * 1000)
+        completion_payload = {
+            "input_data": input_data,
+            "parent_execution_id": parent_execution_id,
+        }
+        await asyncio.create_task(
+            _send_workflow_completion(
+                agent_instance,
+                execution_context,
+                result,
+                duration_ms,
+                completion_payload,
+            )
+        )
+        _maybe_generate_vc("success", result, duration_ms, None)
+        return result
+    except Exception as exc:
+        duration_ms = int((time.time() - start_time) * 1000)
+        error_payload = {
+            "input_data": input_data,
+            "parent_execution_id": parent_execution_id,
+        }
+        _maybe_generate_vc("error", None, duration_ms, str(exc))
+        await asyncio.create_task(
+            _send_workflow_error(
+                agent_instance,
+                execution_context,
+                str(exc),
+                duration_ms,
+                error_payload,
+            )
+        )
+        raise
+
+    finally:
+        if token is not None:
+            reset_execution_context(token)
+        agent_instance._current_execution_context = previous_agent_context
+        if client is not None:
+            client._current_workflow_context = previous_client_context
+
+
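As the injection branch above shows, a reasoner can opt in to receiving the tracking context simply by declaring an execution_context parameter. A sketch under that assumption; the function itself is hypothetical:

    from typing import Optional

    from agentfield.decorators import reasoner
    from agentfield.execution_context import ExecutionContext

    @reasoner
    async def classify(text: str, execution_context: Optional[ExecutionContext] = None) -> dict:
        # Because "execution_context" appears in the signature, _execute_with_tracking
        # injects the active context via call_kwargs.setdefault("execution_context", ...).
        workflow_id = execution_context.workflow_id if execution_context else None
        return {"label": "unknown", "workflow_id": workflow_id}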
+def _build_input_payload(
+    signature: inspect.Signature, args: tuple, kwargs: Dict[str, Any]
+) -> Dict[str, Any]:
+    if not signature.parameters:
+        return dict(kwargs)
+
+    try:
+        bound = signature.bind_partial(*args, **kwargs)
+        bound.apply_defaults()
+    except Exception:
+        payload = {f"arg_{idx}": value for idx, value in enumerate(args)}
+        payload.update(kwargs)
+        return payload
+
+    payload = {}
+    for name, value in bound.arguments.items():
+        if name == "self":
+            continue
+        payload[name] = value
+    return payload
+
+
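For illustration (assuming _build_input_payload is imported from agentfield.decorators), the helper returns the bound argument names minus self, and falls back to generic arg_<n> keys when binding fails:

    import inspect

    from agentfield.decorators import _build_input_payload

    def greet(name: str, excited: bool = False) -> str:
        return f"Hello, {name}{'!' if excited else '.'}"

    sig = inspect.signature(greet)

    # Arguments bind cleanly and apply_defaults() fills in the rest:
    _build_input_payload(sig, ("Ada",), {})
    # -> {'name': 'Ada', 'excited': False}

    # Binding fails (too many positionals), so the generic fallback is used:
    _build_input_payload(sig, ("Ada", True, 42), {})
    # -> {'arg_0': 'Ada', 'arg_1': True, 'arg_2': 42}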
+def _compose_event_payload(
+    agent,
+    context: ExecutionContext,
+    reasoner_name: str,
+    status: str,
+    parent_execution_id: Optional[str],
+    input_data: Optional[Dict[str, Any]] = None,
+) -> Dict[str, Any]:
+    event: Dict[str, Any] = {
+        "execution_id": context.execution_id,
+        "workflow_id": context.workflow_id,
+        "run_id": context.run_id,
+        "reasoner_id": reasoner_name,
+        "agent_node_id": getattr(agent, "node_id", None),
+        "status": status,
+        "type": reasoner_name,
+        "parent_execution_id": parent_execution_id,
+        "parent_workflow_id": context.parent_workflow_id,
+    }
+    if input_data is not None:
+        event["input_data"] = input_data
+    return event
+
+
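The composed event is a flat dict whose keys mirror the function above; the values below are illustrative only:

    event = {
        "execution_id": "exec-123",
        "workflow_id": "wf-456",
        "run_id": "run-789",
        "reasoner_id": "summarize",
        "agent_node_id": "agent-node-1",
        "status": "running",          # or "succeeded" / "failed" in the helpers below
        "type": "summarize",
        "parent_execution_id": None,
        "parent_workflow_id": None,
        "input_data": {"text": "..."},
    }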
+def on_change(pattern: Union[str, List[str]]):
+    """
+    Decorator to mark a function as a memory event listener.
+
+    Args:
+        pattern: Memory pattern(s) to listen for changes
+
+    Returns:
+        Decorated function with memory event listener metadata
+    """
+
+    def decorator(func: Callable) -> Callable:
+        @functools.wraps(func)
+        async def wrapper(*args, **kwargs):
+            return await func(*args, **kwargs)
+
+        # Attach metadata to the function
+        wrapper._memory_event_listener = True
+        wrapper._memory_event_patterns = (
+            pattern if isinstance(pattern, list) else [pattern]
+        )
+        return wrapper
+
+    return decorator
+
+
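A usage sketch for the listener decorator. The key patterns and handler bodies are hypothetical; on_change only tags the function, and the agent runtime is expected to discover the _memory_event_listener / _memory_event_patterns attributes and route matching memory events to it.

    from agentfield.decorators import on_change

    @on_change("user.profile.*")
    async def on_profile_change(event):
        print("profile changed:", event)

    # Several patterns can be registered at once by passing a list:
    @on_change(["orders.*", "cart.*"])
    async def on_commerce_change(event):
        ...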
+# Legacy support for old reasoner decorator signature
+async def _send_workflow_start(
+    agent, context: ExecutionContext, payload: Dict[str, Any]
+) -> None:
+    handler = getattr(agent, "workflow_handler", None)
+    if handler is None:
+        return
+    try:
+        reasoner_name = payload.get("reasoner_name", context.reasoner_name)
+        parent_execution_id = payload.get("parent_execution_id")
+        input_data = payload.get("input_data") or {}
+
+        if hasattr(handler, "notify_call_start"):
+            await handler.notify_call_start(
+                context.execution_id,
+                context,
+                reasoner_name,
+                input_data,
+                parent_execution_id=parent_execution_id,
+            )
+        elif hasattr(handler, "fire_and_forget_update"):
+            event_payload = _compose_event_payload(
+                agent,
+                context,
+                reasoner_name,
+                "running",
+                parent_execution_id,
+                input_data=input_data,
+            )
+            await handler.fire_and_forget_update(event_payload)
+    except Exception as exc:  # pragma: no cover - logging pathway
+        if getattr(agent, "dev_mode", False):
+            log_warn(f"Failed to emit workflow start: {exc}")
+
+
+async def _send_workflow_completion(
+    agent,
+    context: ExecutionContext,
+    result: Any,
+    duration_ms: int,
+    payload: Dict[str, Any],
+) -> None:
+    handler = getattr(agent, "workflow_handler", None)
+    if handler is None:
+        return
+    try:
+        parent_execution_id = payload.get("parent_execution_id")
+        input_data = payload.get("input_data")
+        reasoner_name = context.reasoner_name
+
+        if hasattr(handler, "notify_call_complete"):
+            await handler.notify_call_complete(
+                context.execution_id,
+                context.workflow_id,
+                result,
+                duration_ms,
+                context,
+                input_data=input_data,
+                parent_execution_id=parent_execution_id,
+            )
+        elif hasattr(handler, "fire_and_forget_update"):
+            event_payload = _compose_event_payload(
+                agent,
+                context,
+                reasoner_name,
+                "succeeded",
+                parent_execution_id,
+                input_data=input_data,
+            )
+            event_payload["result"] = result
+            event_payload["duration_ms"] = duration_ms
+            await handler.fire_and_forget_update(event_payload)
+    except Exception as exc:  # pragma: no cover - logging pathway
+        if getattr(agent, "dev_mode", False):
+            log_warn(f"Failed to emit workflow completion: {exc}")
+
+
+async def _send_workflow_error(
+    agent,
+    context: ExecutionContext,
+    message: str,
+    duration_ms: int,
+    payload: Dict[str, Any],
+) -> None:
+    handler = getattr(agent, "workflow_handler", None)
+    if handler is None:
+        return
+    try:
+        parent_execution_id = payload.get("parent_execution_id")
+        input_data = payload.get("input_data")
+        reasoner_name = context.reasoner_name
+
+        if hasattr(handler, "notify_call_error"):
+            await handler.notify_call_error(
+                context.execution_id,
+                context.workflow_id,
+                message,
+                duration_ms,
+                context,
+                input_data=input_data,
+                parent_execution_id=parent_execution_id,
+            )
+        elif hasattr(handler, "fire_and_forget_update"):
+            event_payload = _compose_event_payload(
+                agent,
+                context,
+                reasoner_name,
+                "failed",
+                parent_execution_id,
+                input_data=input_data,
+            )
+            event_payload["error"] = message
+            event_payload["duration_ms"] = duration_ms
+            await handler.fire_and_forget_update(event_payload)
+    except Exception as exc:  # pragma: no cover - logging pathway
+        if getattr(agent, "dev_mode", False):
+            log_warn(f"Failed to emit workflow error: {exc}")
+
+
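These notification helpers duck-type the agent's workflow_handler: they prefer notify_call_start / notify_call_complete / notify_call_error and otherwise fall back to fire_and_forget_update with a composed event. A hypothetical minimal handler covering just that fallback surface, plus _ensure_execution_registered, which _execute_with_tracking calls whenever a handler is present:

    class LoggingWorkflowHandler:
        """Hypothetical stand-in illustrating the duck-typed surface used in this module."""

        async def _ensure_execution_registered(self, execution_context, reasoner_name, parent_context):
            # _execute_with_tracking awaits this before running the reasoner.
            return execution_context

        async def fire_and_forget_update(self, event: dict) -> None:
            # Fallback notification path; receives the _compose_event_payload dict,
            # extended with "result"/"duration_ms" or "error"/"duration_ms".
            print(f"[workflow] {event['status']}: {event['reasoner_id']}")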
+def legacy_reasoner(reasoner_id: str, input_schema: dict, output_schema: dict):
+    """
+    Legacy reasoner decorator for backward compatibility.
+
+    This is kept for compatibility with existing code that uses the old signature.
+    New code should use the enhanced @reasoner decorator.
+    """
+
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            return func(*args, **kwargs)
+
+        # Attach metadata to the function
+        wrapper._reasoner_def = ReasonerDefinition(
+            id=reasoner_id, input_schema=input_schema, output_schema=output_schema
+        )
+        return wrapper
+
+    return decorator
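A usage sketch of the legacy form; the schema dicts below are hypothetical JSON-Schema-style payloads:

    from agentfield.decorators import legacy_reasoner

    @legacy_reasoner(
        reasoner_id="echo",
        input_schema={"type": "object", "properties": {"text": {"type": "string"}}},
        output_schema={"type": "object", "properties": {"text": {"type": "string"}}},
    )
    def echo(payload):
        # The decorator attaches wrapper._reasoner_def, a ReasonerDefinition
        # carrying id, input_schema, and output_schema.
        return payload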