agnt5-0.2.5-cp39-abi3-macosx_11_0_arm64.whl → agnt5-0.2.6-cp39-abi3-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
agnt5/__init__.py CHANGED
@@ -32,7 +32,7 @@ from .tool import Tool, ToolRegistry, tool
  from .types import BackoffPolicy, BackoffType, FunctionConfig, RetryPolicy, WorkflowConfig
  from .version import _get_version
  from .worker import Worker
- from .workflow import WorkflowRegistry, chatflow, workflow
+ from .workflow import WorkflowRegistry, workflow

  # Expose simplified language model API (recommended)
  from . import lm
@@ -57,7 +57,6 @@ __all__ = [
      "with_entity_context",
      "create_entity_context",
      "workflow",
-     "chatflow",
      "WorkflowRegistry",
      "tool",
      "Tool",
agnt5/_core.abi3.so CHANGED
Binary file
agnt5/agent.py CHANGED
@@ -13,7 +13,7 @@ from typing import Any, Callable, Dict, List, Optional

  from .context import Context
  from . import lm
- from .lm import GenerateRequest, GenerateResponse, Message, ModelConfig, ToolDefinition
+ from .lm import GenerateRequest, GenerateResponse, LanguageModel, Message, ModelConfig, ToolDefinition
  from .tool import Tool, ToolRegistry
  from ._telemetry import setup_module_logger

@@ -201,7 +201,7 @@ class Agent:
      def __init__(
          self,
          name: str,
-         model: str,
+         model: Any,  # Can be string like "openai/gpt-4o-mini" OR LanguageModel instance
          instructions: str,
          tools: Optional[List[Any]] = None,
          handoffs: Optional[List[Handoff]] = None,
@@ -210,12 +210,13 @@ class Agent:
          top_p: Optional[float] = None,
          model_config: Optional[ModelConfig] = None,
          max_iterations: int = 10,
+         model_name: Optional[str] = None,  # For backwards compatibility with tests
      ):
          """Initialize agent.

          Args:
              name: Agent name/identifier
-             model: Model string with provider prefix (e.g., "openai/gpt-4o-mini")
+             model: Model string with provider prefix (e.g., "openai/gpt-4o-mini") OR LanguageModel instance
              instructions: System instructions for the agent
              tools: List of tools available to the agent (functions, Tool instances, or Agent instances)
              handoffs: List of Handoff configurations for agent-to-agent delegation
@@ -224,9 +225,9 @@ class Agent:
              top_p: Nucleus sampling parameter
              model_config: Optional advanced configuration (custom endpoints, headers, etc.)
              max_iterations: Maximum reasoning iterations
+             model_name: Optional model name (for backwards compatibility, used when model is a LanguageModel instance)
          """
          self.name = name
-         self.model = model
          self.instructions = instructions
          self.temperature = temperature
          self.max_tokens = max_tokens
@@ -234,6 +235,20 @@ class Agent:
          self.model_config = model_config
          self.max_iterations = max_iterations

+         # Support both string model names and LanguageModel instances
+         if isinstance(model, str):
+             # New API: model is a string like "openai/gpt-4o-mini"
+             self.model = model
+             self.model_name = model_name or model
+             self._language_model = None  # Will create on demand
+         elif isinstance(model, LanguageModel):
+             # Old API (for tests): model is a LanguageModel instance
+             self._language_model = model
+             self.model = model  # Keep for backwards compatibility
+             self.model_name = model_name or "mock-model"
+         else:
+             raise TypeError(f"model must be a string or LanguageModel instance, got {type(model)}")
+
          # Store handoffs for building handoff tools
          self.handoffs = handoffs or []
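The constructor now accepts either form of `model`. A minimal sketch of the two paths — the agent name and instructions are illustrative, and `Agent` is imported from its own module since this diff does not show a package-level re-export:

```python
from agnt5.agent import Agent

# String form: provider-prefixed model name, resolved to a language model on demand.
agent = Agent(
    name="support",
    model="openai/gpt-4o-mini",
    instructions="Answer customer questions briefly.",
)
assert agent.model_name == "openai/gpt-4o-mini"  # model_name falls back to the model string

# A LanguageModel instance is also accepted (see the EchoModel sketch after the
# lm.py diff below); any other type raises TypeError.
```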
agnt5/lm.py CHANGED
@@ -33,6 +33,7 @@ Supported Providers (via model prefix):
  from __future__ import annotations

  import json
+ from abc import ABC, abstractmethod
  from dataclasses import dataclass, field
  from enum import Enum
  from typing import Any, AsyncIterator, Dict, List, Optional
@@ -211,9 +212,44 @@ class GenerateRequest:
      response_schema: Optional[str] = None  # JSON-encoded schema for structured output


+ # Abstract base class for language models
+ # This exists primarily for testing/mocking purposes
+ class LanguageModel(ABC):
+     """Abstract base class for language model implementations.
+
+     This class defines the interface that all language models must implement.
+     It's primarily used for testing and mocking, as production code should use
+     the module-level generate() and stream() functions instead.
+     """
+
+     @abstractmethod
+     async def generate(self, request: GenerateRequest) -> GenerateResponse:
+         """Generate completion from LLM.
+
+         Args:
+             request: Generation request with model, messages, and configuration
+
+         Returns:
+             GenerateResponse with text, usage, and optional tool calls
+         """
+         pass
+
+     @abstractmethod
+     async def stream(self, request: GenerateRequest) -> AsyncIterator[str]:
+         """Stream completion from LLM.
+
+         Args:
+             request: Generation request with model, messages, and configuration
+
+         Yields:
+             Text chunks as they are generated
+         """
+         pass
+
+
  # Internal wrapper for the Rust-backed implementation
  # Users should use the module-level generate() and stream() functions instead
- class _LanguageModel:
+ class _LanguageModel(LanguageModel):
      """Internal Language Model wrapper using Rust SDK core.

      This class is for internal use only. Users should use the module-level
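Because `LanguageModel` exists mainly so tests can inject a fake model, here is a hedged sketch of such a test double. The `GenerateResponse(text=...)` construction and `Message.content` access are assumptions about dataclass fields not shown in this diff:

```python
from typing import AsyncIterator

from agnt5.lm import GenerateRequest, GenerateResponse, LanguageModel


class EchoModel(LanguageModel):
    """Hypothetical test double that echoes the last message back."""

    async def generate(self, request: GenerateRequest) -> GenerateResponse:
        # Assumes Message exposes .content and GenerateResponse accepts text=...
        last = request.messages[-1].content if request.messages else ""
        return GenerateResponse(text=f"echo: {last}")

    async def stream(self, request: GenerateRequest) -> AsyncIterator[str]:
        # Reuse generate() and yield whitespace-separated chunks.
        response = await self.generate(request)
        for chunk in response.text.split():
            yield chunk
```

Passing `EchoModel()` as the `model` argument of `Agent` (with an explicit `model_name`) exercises the test-oriented constructor path without provider credentials.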
agnt5/worker.py CHANGED
@@ -5,7 +5,7 @@ from __future__ import annotations
  import asyncio
  import contextvars
  import logging
- from typing import Any, Dict, Optional
+ from typing import Any, Dict, List, Optional

  from .function import FunctionRegistry
  from .workflow import WorkflowRegistry
@@ -57,8 +57,29 @@ class Worker:
          coordinator_endpoint: Optional[str] = None,
          runtime: str = "standalone",
          metadata: Optional[Dict[str, str]] = None,
+         # Explicit component registration (hybrid approach)
+         functions: Optional[List] = None,
+         workflows: Optional[List] = None,
+         entities: Optional[List] = None,
+         agents: Optional[List] = None,
+         tools: Optional[List] = None,
+         # Auto-registration (development mode)
+         auto_register: bool = False,
+         auto_register_paths: Optional[List[str]] = None,
+         pyproject_path: Optional[str] = None,
      ):
-         """Initialize a new Worker.
+         """Initialize a new Worker with explicit or automatic component registration.
+
+         The Worker supports two registration modes:
+
+         **Explicit Mode (default, production):**
+         - Register workflows/agents explicitly, their dependencies are auto-included
+         - Optionally register standalone functions/tools for direct API invocation
+
+         **Auto-Registration Mode (development):**
+         - Automatically discovers all decorated components in source paths
+         - Reads source paths from pyproject.toml or uses explicit paths
+         - No need to maintain import lists

          Args:
              service_name: Unique name for this service
@@ -66,6 +87,40 @@
              coordinator_endpoint: Coordinator endpoint URL (default: from env AGNT5_COORDINATOR_ENDPOINT)
              runtime: Runtime type - "standalone", "docker", "kubernetes", etc.
              metadata: Optional service-level metadata
+             functions: List of @function decorated handlers (explicit mode)
+             workflows: List of @workflow decorated handlers (explicit mode)
+             entities: List of Entity classes (explicit mode)
+             agents: List of Agent instances (explicit mode)
+             tools: List of Tool instances (explicit mode)
+             auto_register: Enable automatic component discovery (default: False)
+             auto_register_paths: Explicit source paths to scan (overrides pyproject.toml discovery)
+             pyproject_path: Path to pyproject.toml (default: current directory)
+
+         Example (explicit mode - production):
+             ```python
+             from agnt5 import Worker
+             from my_service import greet_user, order_fulfillment, ShoppingCart, analyst_agent
+
+             worker = Worker(
+                 service_name="my-service",
+                 workflows=[order_fulfillment],
+                 entities=[ShoppingCart],
+                 agents=[analyst_agent],
+                 functions=[greet_user],
+             )
+             await worker.run()
+             ```
+
+         Example (auto-register mode - development):
+             ```python
+             from agnt5 import Worker
+
+             worker = Worker(
+                 service_name="my-service",
+                 auto_register=True,  # Discovers from pyproject.toml
+             )
+             await worker.run()
+             ```
          """
          self.service_name = service_name
          self.service_version = service_version
@@ -99,36 +154,298 @@ class Worker:
          from .entity import EntityStateManager
          self._entity_state_manager = EntityStateManager()

+         # Component registration: auto-discover or explicit
+         if auto_register:
+             # Auto-registration mode: discover from source paths
+             if auto_register_paths:
+                 source_paths = auto_register_paths
+                 logger.info(f"Auto-registration with explicit paths: {source_paths}")
+             else:
+                 source_paths = self._discover_source_paths(pyproject_path)
+                 logger.info(f"Auto-registration with discovered paths: {source_paths}")
+
+             # Auto-discover components (will populate _explicit_components)
+             self._auto_discover_components(source_paths)
+         else:
+             # Explicit registration from constructor kwargs
+             self._explicit_components = {
+                 'functions': list(functions or []),
+                 'workflows': list(workflows or []),
+                 'entities': list(entities or []),
+                 'agents': list(agents or []),
+                 'tools': list(tools or []),
+             }
+
+             # Count explicitly registered components
+             total_explicit = sum(len(v) for v in self._explicit_components.values())
+             logger.info(
+                 f"Worker initialized: {service_name} v{service_version} (runtime: {runtime}), "
+                 f"{total_explicit} components explicitly registered"
+             )
+
+     def register_components(
+         self,
+         functions=None,
+         workflows=None,
+         entities=None,
+         agents=None,
+         tools=None,
+     ):
+         """Register additional components after Worker initialization.
+
+         This method allows incremental registration of components after the Worker
+         has been created. Useful for conditional or dynamic component registration.
+
+         Args:
+             functions: List of functions decorated with @function
+             workflows: List of workflows decorated with @workflow
+             entities: List of entity classes
+             agents: List of agent instances
+             tools: List of tool instances
+
+         Example:
+             ```python
+             worker = Worker(service_name="my-service")
+
+             # Register conditionally
+             if feature_enabled:
+                 worker.register_components(workflows=[advanced_workflow])
+             ```
+         """
+         if functions:
+             self._explicit_components['functions'].extend(functions)
+             logger.debug(f"Incrementally registered {len(functions)} functions")
+
+         if workflows:
+             self._explicit_components['workflows'].extend(workflows)
+             logger.debug(f"Incrementally registered {len(workflows)} workflows")
+
+         if entities:
+             self._explicit_components['entities'].extend(entities)
+             logger.debug(f"Incrementally registered {len(entities)} entities")
+
+         if agents:
+             self._explicit_components['agents'].extend(agents)
+             logger.debug(f"Incrementally registered {len(agents)} agents")
+
+         if tools:
+             self._explicit_components['tools'].extend(tools)
+             logger.debug(f"Incrementally registered {len(tools)} tools")
+
+         total = sum(len(v) for v in self._explicit_components.values())
+         logger.info(f"Total components now registered: {total}")
+
+     def _discover_source_paths(self, pyproject_path: Optional[str] = None) -> List[str]:
+         """Discover source paths from pyproject.toml.
+
+         Reads pyproject.toml to find package source directories using:
+         - Hatch: [tool.hatch.build.targets.wheel] packages
+         - Maturin: [tool.maturin] python-source
+         - Fallback: ["src"] if not found
+
+         Args:
+             pyproject_path: Path to pyproject.toml (default: current directory)
+
+         Returns:
+             List of directory paths to scan (e.g., ["src/agnt5_benchmark"])
+         """
+         from pathlib import Path
+
+         # Python 3.11+ has tomllib in stdlib
+         try:
+             import tomllib
+         except ImportError:
+             logger.error("tomllib not available (Python 3.11+ required for auto-registration)")
+             return ["src"]
+
+         # Determine pyproject.toml location
+         if pyproject_path:
+             pyproject_file = Path(pyproject_path)
+         else:
+             # Look in current directory
+             pyproject_file = Path.cwd() / "pyproject.toml"
+
+         if not pyproject_file.exists():
+             logger.warning(
+                 f"pyproject.toml not found at {pyproject_file}, "
+                 f"defaulting to 'src/' directory"
+             )
+             return ["src"]
+
+         # Parse pyproject.toml
+         try:
+             with open(pyproject_file, "rb") as f:
+                 config = tomllib.load(f)
+         except Exception as e:
+             logger.error(f"Failed to parse pyproject.toml: {e}")
+             return ["src"]
+
+         # Extract source paths based on build system
+         source_paths = []
+
+         # Try Hatch configuration
+         if "tool" in config and "hatch" in config["tool"]:
+             hatch_config = config["tool"]["hatch"]
+             if "build" in hatch_config and "targets" in hatch_config["build"]:
+                 wheel_config = hatch_config["build"]["targets"].get("wheel", {})
+                 packages = wheel_config.get("packages", [])
+                 source_paths.extend(packages)
+
+         # Try Maturin configuration
+         if not source_paths and "tool" in config and "maturin" in config["tool"]:
+             maturin_config = config["tool"]["maturin"]
+             python_source = maturin_config.get("python-source")
+             if python_source:
+                 source_paths.append(python_source)
+
+         # Fallback to src/
+         if not source_paths:
+             logger.info("No source paths in pyproject.toml, defaulting to 'src/'")
+             source_paths = ["src"]
+
+         logger.info(f"Discovered source paths from pyproject.toml: {source_paths}")
+         return source_paths
+
+     def _auto_discover_components(self, source_paths: List[str]) -> None:
+         """Auto-discover components by importing all Python files in source paths.
+
+         Args:
+             source_paths: List of directory paths to scan
+         """
+         import importlib.util
+         import sys
+         from pathlib import Path
+
+         logger.info(f"Auto-discovering components in paths: {source_paths}")
+
+         total_modules = 0
+
+         for source_path in source_paths:
+             path = Path(source_path)
+
+             if not path.exists():
+                 logger.warning(f"Source path does not exist: {source_path}")
+                 continue
+
+             # Recursively find all .py files
+             for py_file in path.rglob("*.py"):
+                 # Skip __pycache__ and test files
+                 if "__pycache__" in str(py_file) or py_file.name.startswith("test_"):
+                     continue
+
+                 # Convert path to module name
+                 # e.g., src/agnt5_benchmark/functions.py -> agnt5_benchmark.functions
+                 relative_path = py_file.relative_to(path.parent)
+                 module_parts = list(relative_path.parts[:-1])  # Remove .py extension part
+                 module_parts.append(relative_path.stem)  # Add filename without .py
+                 module_name = ".".join(module_parts)
+
+                 # Import module (triggers decorators)
+                 try:
+                     if module_name in sys.modules:
+                         logger.debug(f"Module already imported: {module_name}")
+                     else:
+                         spec = importlib.util.spec_from_file_location(module_name, py_file)
+                         if spec and spec.loader:
+                             module = importlib.util.module_from_spec(spec)
+                             sys.modules[module_name] = module
+                             spec.loader.exec_module(module)
+                             logger.debug(f"Auto-imported: {module_name}")
+                             total_modules += 1
+                 except Exception as e:
+                     logger.warning(f"Failed to import {module_name}: {e}")
+
+         logger.info(f"Auto-imported {total_modules} modules")
+
+         # Collect components from registries
+         from .agent import AgentRegistry
+         from .entity import EntityRegistry
+         from .tool import ToolRegistry
+
+         # Extract actual objects from registries
+         functions = [cfg.handler for cfg in FunctionRegistry.all().values()]
+         workflows = [cfg.handler for cfg in WorkflowRegistry.all().values()]
+         entities = [et.entity_class for et in EntityRegistry.all().values()]
+         agents = list(AgentRegistry.all().values())
+         tools = list(ToolRegistry.all().values())
+
+         self._explicit_components = {
+             'functions': functions,
+             'workflows': workflows,
+             'entities': entities,
+             'agents': agents,
+             'tools': tools,
+         }
+
          logger.info(
-             f"Worker initialized: {service_name} v{service_version} (runtime: {runtime})"
+             f"Auto-discovered components: "
+             f"{len(functions)} functions, "
+             f"{len(workflows)} workflows, "
+             f"{len(entities)} entities, "
+             f"{len(agents)} agents, "
+             f"{len(tools)} tools"
          )

      def _discover_components(self):
-         """Discover all registered components across all registries."""
+         """Discover explicit components and auto-include their dependencies.
+
+         Hybrid approach:
+         - Explicitly registered workflows/agents are processed
+         - Functions called by workflows are auto-included (TODO: implement)
+         - Tools used by agents are auto-included
+         - Standalone functions/tools can be explicitly registered
+
+         Returns:
+             List of PyComponentInfo instances for all components
+         """
          components = []
+         import json

-         # Import all registries
-         from .tool import ToolRegistry
+         # Import registries
          from .entity import EntityRegistry
-         from .agent import AgentRegistry
+         from .tool import ToolRegistry

-         # Discover functions
-         import json
-         for name, config in FunctionRegistry.all().items():
-             # Serialize schemas to JSON strings
-             input_schema_str = None
-             if config.input_schema:
-                 input_schema_str = json.dumps(config.input_schema)
+         # Track all components (explicit + auto-included)
+         all_functions = set(self._explicit_components['functions'])
+         all_tools = set(self._explicit_components['tools'])
+
+         # Auto-include agent tool dependencies
+         for agent in self._explicit_components['agents']:
+             if hasattr(agent, 'tools') and agent.tools:
+                 # Agent.tools is a dict of {tool_name: tool_instance}
+                 all_tools.update(agent.tools.values())
+                 logger.debug(
+                     f"Auto-included {len(agent.tools)} tools from agent '{agent.name}'"
+                 )
+
+         # Log registration summary
+         explicit_func_count = len(self._explicit_components['functions'])
+         explicit_tool_count = len(self._explicit_components['tools'])
+         auto_func_count = len(all_functions) - explicit_func_count
+         auto_tool_count = len(all_tools) - explicit_tool_count
+
+         logger.info(
+             f"Component registration summary: "
+             f"{len(all_functions)} functions ({explicit_func_count} explicit, {auto_func_count} auto-included), "
+             f"{len(self._explicit_components['workflows'])} workflows, "
+             f"{len(self._explicit_components['entities'])} entities, "
+             f"{len(self._explicit_components['agents'])} agents, "
+             f"{len(all_tools)} tools ({explicit_tool_count} explicit, {auto_tool_count} auto-included)"
+         )

-             output_schema_str = None
-             if config.output_schema:
-                 output_schema_str = json.dumps(config.output_schema)
+         # Process functions (explicit + auto-included)
+         for func in all_functions:
+             config = FunctionRegistry.get(func.__name__)
+             if not config:
+                 logger.warning(f"Function '{func.__name__}' not found in FunctionRegistry")
+                 continue

-             # Get metadata with description
+             input_schema_str = json.dumps(config.input_schema) if config.input_schema else None
+             output_schema_str = json.dumps(config.output_schema) if config.output_schema else None
              metadata = config.metadata if config.metadata else {}

              component_info = self._PyComponentInfo(
-                 name=name,
+                 name=config.name,
                  component_type="function",
                  metadata=metadata,
                  config={},
@@ -137,24 +454,20 @@ class Worker:
                  definition=None,
              )
              components.append(component_info)
-             logger.debug(f"Discovered function: {name}")
-
-         # Discover workflows
-         for name, config in WorkflowRegistry.all().items():
-             # Serialize schemas to JSON strings
-             input_schema_str = None
-             if config.input_schema:
-                 input_schema_str = json.dumps(config.input_schema)

-             output_schema_str = None
-             if config.output_schema:
-                 output_schema_str = json.dumps(config.output_schema)
+         # Process workflows
+         for workflow in self._explicit_components['workflows']:
+             config = WorkflowRegistry.get(workflow.__name__)
+             if not config:
+                 logger.warning(f"Workflow '{workflow.__name__}' not found in WorkflowRegistry")
+                 continue

-             # Get metadata with description
+             input_schema_str = json.dumps(config.input_schema) if config.input_schema else None
+             output_schema_str = json.dumps(config.output_schema) if config.output_schema else None
              metadata = config.metadata if config.metadata else {}

              component_info = self._PyComponentInfo(
-                 name=name,
+                 name=config.name,
                  component_type="workflow",
                  metadata=metadata,
                  config={},
@@ -163,34 +476,14 @@ class Worker:
                  definition=None,
              )
              components.append(component_info)
-             logger.debug(f"Discovered workflow: {name}")
-
-         # Discover tools
-         for name, tool in ToolRegistry.all().items():
-             # Serialize schemas to JSON strings
-             input_schema_str = None
-             if hasattr(tool, 'input_schema') and tool.input_schema:
-                 input_schema_str = json.dumps(tool.input_schema)
-
-             output_schema_str = None
-             if hasattr(tool, 'output_schema') and tool.output_schema:
-                 output_schema_str = json.dumps(tool.output_schema)

-             component_info = self._PyComponentInfo(
-                 name=name,
-                 component_type="tool",
-                 metadata={},
-                 config={},
-                 input_schema=input_schema_str,
-                 output_schema=output_schema_str,
-                 definition=None,
-             )
-             components.append(component_info)
-             logger.debug(f"Discovered tool: {name}")
+         # Process entities
+         for entity_class in self._explicit_components['entities']:
+             entity_type = EntityRegistry.get(entity_class.__name__)
+             if not entity_type:
+                 logger.warning(f"Entity '{entity_class.__name__}' not found in EntityRegistry")
+                 continue

-         # Discover entities
-         for name, entity_type in EntityRegistry.all().items():
-             # Build method schemas and metadata for each method
              method_schemas = {}
              for method_name, (input_schema, output_schema) in entity_type._method_schemas.items():
                  method_metadata = entity_type._method_metadata.get(method_name, {})
@@ -200,43 +493,33 @@ class Worker:
                      "metadata": method_metadata
                  }

-             # Build metadata dict with methods list and schemas
              metadata_dict = {
                  "methods": json.dumps(list(entity_type._method_schemas.keys())),
                  "method_schemas": json.dumps(method_schemas)
              }

              component_info = self._PyComponentInfo(
-                 name=name,
+                 name=entity_type.name,
                  component_type="entity",
                  metadata=metadata_dict,
                  config={},
-                 input_schema=None,  # Entities have per-method schemas in metadata
+                 input_schema=None,
                  output_schema=None,
                  definition=None,
              )
              components.append(component_info)
-             logger.debug(f"Discovered entity: {name} with methods: {list(entity_type._method_schemas.keys())}")

-         # Discover agents
-         for name, agent in AgentRegistry.all().items():
-             # Serialize schemas to JSON strings
-             input_schema_str = None
-             if hasattr(agent, 'input_schema') and agent.input_schema:
-                 input_schema_str = json.dumps(agent.input_schema)
+         # Process agents
+         for agent in self._explicit_components['agents']:
+             input_schema_str = json.dumps(agent.input_schema) if hasattr(agent, 'input_schema') and agent.input_schema else None
+             output_schema_str = json.dumps(agent.output_schema) if hasattr(agent, 'output_schema') and agent.output_schema else None

-             output_schema_str = None
-             if hasattr(agent, 'output_schema') and agent.output_schema:
-                 output_schema_str = json.dumps(agent.output_schema)
-
-             # Get metadata (includes description and model info)
              metadata_dict = agent.metadata if hasattr(agent, 'metadata') else {}
-             # Add tools list to metadata
              if hasattr(agent, 'tools'):
                  metadata_dict["tools"] = json.dumps(list(agent.tools.keys()))

              component_info = self._PyComponentInfo(
-                 name=name,
+                 name=agent.name,
                  component_type="agent",
                  metadata=metadata_dict,
                  config={},
@@ -245,9 +528,24 @@ class Worker:
                  definition=None,
              )
              components.append(component_info)
-             logger.debug(f"Discovered agent: {name}")

-         logger.info(f"Discovered {len(components)} components")
+         # Process tools (explicit + auto-included)
+         for tool in all_tools:
+             input_schema_str = json.dumps(tool.input_schema) if hasattr(tool, 'input_schema') and tool.input_schema else None
+             output_schema_str = json.dumps(tool.output_schema) if hasattr(tool, 'output_schema') and tool.output_schema else None
+
+             component_info = self._PyComponentInfo(
+                 name=tool.name,
+                 component_type="tool",
+                 metadata={},
+                 config={},
+                 input_schema=input_schema_str,
+                 output_schema=output_schema_str,
+                 definition=None,
+             )
+             components.append(component_info)
+
+         logger.info(f"Discovered {len(components)} total components")
          return components

      def _create_message_handler(self):
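To make the hybrid registration concrete, a sketch with hypothetical component names (none of these ship with the SDK): only the workflow and the agent are registered, and the agent's tool is auto-included by `_discover_components`:

```python
from agnt5 import Worker, tool, workflow
from agnt5.agent import Agent


@tool
def lookup_order(order_id: str) -> dict:
    """Hypothetical tool: fetch an order record."""
    return {"order_id": order_id, "status": "shipped"}


@workflow
async def fulfill_order(ctx, order_id: str) -> dict:
    """Hypothetical workflow using durable state."""
    ctx.state.set("status", "processing")
    return {"order_id": order_id, "status": ctx.state.get("status")}


support_agent = Agent(
    name="support",
    model="openai/gpt-4o-mini",
    instructions="Help customers with their orders.",
    tools=[lookup_order],
)

# Explicit mode: lookup_order is not listed, but it is auto-included
# because support_agent depends on it.
worker = Worker(
    service_name="orders",
    workflows=[fulfill_order],
    agents=[support_agent],
)
```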
agnt5/workflow.py CHANGED
@@ -463,6 +463,7 @@ def workflow(
      _func: Optional[Callable[..., Any]] = None,
      *,
      name: Optional[str] = None,
+     chat: bool = False,
  ) -> Callable[..., Any]:
      """
      Decorator to mark a function as an AGNT5 durable workflow.
@@ -472,8 +473,9 @@

      Args:
          name: Custom workflow name (default: function's __name__)
+         chat: Enable chat mode for multi-turn conversation workflows (default: False)

-     Example:
+     Example (standard workflow):
          @workflow
          async def process_order(ctx: WorkflowContext, order_id: str) -> dict:
              # Durable state - survives crashes
@@ -491,104 +493,10 @@ def workflow(

          ctx.state.set("status", "completed")
          return {"status": ctx.state.get("status")}
-     """
-
-     def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
-         # Get workflow name
-         workflow_name = name or func.__name__
-
-         # Validate function signature
-         sig = inspect.signature(func)
-         params = list(sig.parameters.values())
-
-         if not params or params[0].name != "ctx":
-             raise ValueError(
-                 f"Workflow '{workflow_name}' must have 'ctx: WorkflowContext' as first parameter"
-             )
-
-         # Convert sync to async if needed
-         if inspect.iscoroutinefunction(func):
-             handler_func = cast(HandlerFunc, func)
-         else:
-             # Wrap sync function in async
-             @functools.wraps(func)
-             async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
-                 return func(*args, **kwargs)
-
-             handler_func = cast(HandlerFunc, async_wrapper)
-
-         # Extract schemas from type hints
-         input_schema, output_schema = extract_function_schemas(func)
-
-         # Extract metadata (description, etc.)
-         metadata = extract_function_metadata(func)
-
-         # Register workflow
-         config = WorkflowConfig(
-             name=workflow_name,
-             handler=handler_func,
-             input_schema=input_schema,
-             output_schema=output_schema,
-             metadata=metadata,
-         )
-         WorkflowRegistry.register(config)
-
-         # Create wrapper that provides context
-         @functools.wraps(func)
-         async def wrapper(*args: Any, **kwargs: Any) -> Any:
-             # Create WorkflowEntity and WorkflowContext if not provided
-             if not args or not isinstance(args[0], WorkflowContext):
-                 # Auto-create workflow entity and context for direct workflow calls
-                 run_id = f"workflow-{uuid.uuid4().hex[:8]}"
-
-                 # Create WorkflowEntity to manage state
-                 workflow_entity = WorkflowEntity(run_id=run_id)
-
-                 # Create WorkflowContext that wraps the entity
-                 ctx = WorkflowContext(
-                     workflow_entity=workflow_entity,
-                     run_id=run_id,
-                 )
-
-                 # Execute workflow
-                 return await handler_func(ctx, *args, **kwargs)
-             else:
-                 # WorkflowContext provided - use it
-                 return await handler_func(*args, **kwargs)
-
-         # Store config on wrapper for introspection
-         wrapper._agnt5_config = config  # type: ignore
-         return wrapper

-     # Handle both @workflow and @workflow(...) syntax
-     if _func is None:
-         return decorator
-     else:
-         return decorator(_func)
-
-
- def chatflow(
-     _func: Optional[Callable[..., Any]] = None,
-     *,
-     name: Optional[str] = None,
- ) -> Callable[..., Any]:
-     """
-     Decorator to mark a function as an AGNT5 chat-enabled workflow.
-
-     Identical to @workflow but adds metadata {"chat": "true"} to indicate
-     this workflow is designed for multi-turn conversation scenarios.
-
-     The platform can use this metadata to:
-     - Enable session affinity and sticky routing
-     - Apply conversation-specific optimizations
-     - Track chat-specific metrics (turn count, conversation length)
-
-     Args:
-         name: Custom workflow name (default: function's __name__)
-
-     Example:
-         @chatflow
-         async def customer_support_chat(ctx: WorkflowContext, message: str) -> dict:
+     Example (chat workflow):
+         @workflow(chat=True)
+         async def customer_support(ctx: WorkflowContext, message: str) -> dict:
              # Initialize conversation state
              if not ctx.state.get("messages"):
                  ctx.state.set("messages", [])
@@ -618,7 +526,7 @@

          if not params or params[0].name != "ctx":
              raise ValueError(
-                 f"Chatflow '{workflow_name}' must have 'ctx: WorkflowContext' as first parameter"
+                 f"Workflow '{workflow_name}' must have 'ctx: WorkflowContext' as first parameter"
              )

          # Convert sync to async if needed
@@ -638,10 +546,11 @@
          # Extract metadata (description, etc.)
          metadata = extract_function_metadata(func)

-         # Add chat metadata - THIS IS THE KEY DIFFERENCE FROM @workflow
-         metadata["chat"] = "true"
+         # Add chat metadata if chat mode is enabled
+         if chat:
+             metadata["chat"] = "true"

-         # Register as workflow (chatflows are workflows with chat metadata)
+         # Register workflow
          config = WorkflowConfig(
              name=workflow_name,
              handler=handler_func,
@@ -656,8 +565,8 @@
          async def wrapper(*args: Any, **kwargs: Any) -> Any:
              # Create WorkflowEntity and WorkflowContext if not provided
              if not args or not isinstance(args[0], WorkflowContext):
-                 # Auto-create workflow entity and context for direct chatflow calls
-                 run_id = f"chatflow-{uuid.uuid4().hex[:8]}"
+                 # Auto-create workflow entity and context for direct workflow calls
+                 run_id = f"workflow-{uuid.uuid4().hex[:8]}"

                  # Create WorkflowEntity to manage state
                  workflow_entity = WorkflowEntity(run_id=run_id)
@@ -668,7 +577,7 @@
                      run_id=run_id,
                  )

-                 # Execute chatflow
+                 # Execute workflow
                  return await handler_func(ctx, *args, **kwargs)
              else:
                  # WorkflowContext provided - use it
@@ -678,8 +587,10 @@
          wrapper._agnt5_config = config  # type: ignore
          return wrapper

-     # Handle both @chatflow and @chatflow(...) syntax
+     # Handle both @workflow and @workflow(...) syntax
      if _func is None:
          return decorator
      else:
          return decorator(_func)
+
+
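Code that previously used `@chatflow` now opts in through the `chat=True` flag, which attaches the same `{"chat": "true"}` metadata. A minimal migration sketch with an illustrative handler:

```python
from agnt5 import workflow
from agnt5.workflow import WorkflowContext


@workflow(chat=True)  # formerly: @chatflow
async def support_chat(ctx: WorkflowContext, message: str) -> dict:
    # Durable conversation state, as in the docstring example above.
    history = ctx.state.get("messages") or []
    history.append({"role": "user", "content": message})
    ctx.state.set("messages", history)
    return {"reply": f"You said: {message}", "turns": len(history)}
```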
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: agnt5
- Version: 0.2.5
+ Version: 0.2.6
  Classifier: Development Status :: 3 - Alpha
  Classifier: Intended Audience :: Developers
  Classifier: Programming Language :: Python :: 3
@@ -1,22 +1,22 @@
- agnt5-0.2.5.dist-info/METADATA,sha256=zHVYaWjmmQbnDeJvr59rQNan70yGDUzjKg4cyPLfGr8,994
- agnt5-0.2.5.dist-info/WHEEL,sha256=vpqC0tRn_8bTHidvtrPbrnFQPZnrhuKzsjDdeKwCd58,102
- agnt5/__init__.py,sha256=XUixnW140lI-_9h1rLtN4jBWXIDL3EgnM7jmiNzGrYk,2037
+ agnt5-0.2.6.dist-info/METADATA,sha256=VoL7dDFb9P5ojrU2jUC4r3_qFDea7idhZEJNOUKVdyY,994
+ agnt5-0.2.6.dist-info/WHEEL,sha256=vpqC0tRn_8bTHidvtrPbrnFQPZnrhuKzsjDdeKwCd58,102
+ agnt5/__init__.py,sha256=NAziyM0ZKahdzqAP2edFHTbVHYVdIjEGXm0oEHdmiuo,2011
  agnt5/_compat.py,sha256=BGuy3v5VDOHVa5f3Z-C22iMN19lAt0mPmXwF3qSSWxI,369
- agnt5/_core.abi3.so,sha256=HGQaXAplREgIbUBvtNhmrmHyTkDfh3inERsj90LiSVM,11928992
+ agnt5/_core.abi3.so,sha256=5nl1dY6TIBSVkfyWKayn0q21kS_pnYOLLehBjEiBSCk,11945360
  agnt5/_retry_utils.py,sha256=loHsWY5BR4wZy57IzcDEjQAy88DHVwVIr25Cn1d9GPA,5801
  agnt5/_schema_utils.py,sha256=MR67RW757T4Oq2Jqf4kB61H_b51zwaf3CLWELnkngRo,9572
  agnt5/_telemetry.py,sha256=bIY9AvBRjJBTHoBPbfR6X1OgaiUf-T0vCoi0_snsWXA,5957
- agnt5/agent.py,sha256=BAhYHKD5YuZXhNZaqeoN7EXKTtzF8OPhCJhpcWQS1YM,27837
+ agnt5/agent.py,sha256=I4KqlW02ssRZ3js2lxhQR7_kqhV60GHFqxje88cWhM4,28837
  agnt5/client.py,sha256=kXksazgxdVXWaG9OkjJA4cWruNtcS-ENhtnkrIdw-Nk,23212
  agnt5/context.py,sha256=S2OzPkhn_jnqSWfT21mSYOux8vHaLKQxcAvggZDHQek,2378
  agnt5/entity.py,sha256=dhdxXUxED79u3OlX9yw-2TLCC9VqBcJqES2kx-fDChs,19041
  agnt5/exceptions.py,sha256=mZ0q-NK6OKhYxgwBJpIbgpgzk-CJaFIHDbp1EE-pS7I,925
  agnt5/function.py,sha256=f1vaAlJRwuo8cxCOGEd8XPido00mOhlPS8UJJx-6hJI,11041
- agnt5/lm.py,sha256=1ufT0TGj_Ra1FXCflgxKfMh1qPQ_DV9p7BLlZGWI4m4,22062
+ agnt5/lm.py,sha256=9dFjd6eQ3f3lFZe7H7rWZherYiP_58MT1F5xpwD8PCg,23195
  agnt5/tool.py,sha256=uc4L-Q9QyLzQDe-MZKk2Wo3o5e-mK8tfaQwVDgQdouQ,13133
  agnt5/tracing.py,sha256=Mh2-OfnQM61lM_P8gxJstafdsUA8Gxoo1lP-Joxhub8,5980
  agnt5/types.py,sha256=Zb71ZMwvrt1p4SH18cAKunp2y5tao_W5_jGYaPDejQo,2840
  agnt5/version.py,sha256=rOq1mObLihnnKgKqBrwZA0zwOPudEKVFcW1a48ynkqc,573
- agnt5/worker.py,sha256=_BnqqqvQE16FuezFbPda9FoiKAwcezSaxxNjU0rTjhs,33576
- agnt5/workflow.py,sha256=GGCyt34W1DTSL8zjcj54qI7SA6Fn3HyQZ1XcMTlCMdI,22748
- agnt5-0.2.5.dist-info/RECORD,,
+ agnt5/worker.py,sha256=GWskOXOmho_HZdhwAhimIWDHfT809Y3h1W_8s8UKt-w,46244
+ agnt5/workflow.py,sha256=sU8Gk7unxE_Gla7Fe-KlXfcBvYa2326GciuoR26CCr0,19585
+ agnt5-0.2.6.dist-info/RECORD,,
File without changes