daita-agents 0.1.1__tar.gz → 0.1.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of daita-agents might be problematic; see the registry's advisory page for more details.

Files changed (96):
  1. {daita_agents-0.1.1 → daita_agents-0.1.2}/PKG-INFO +1 -1
  2. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/agents/base.py +23 -7
  3. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/agents/substrate.py +9 -4
  4. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/relay.py +119 -42
  5. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/workflow.py +35 -25
  6. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita_agents.egg-info/PKG-INFO +1 -1
  7. {daita_agents-0.1.1 → daita_agents-0.1.2}/pyproject.toml +1 -1
  8. {daita_agents-0.1.1 → daita_agents-0.1.2}/LICENSE +0 -0
  9. {daita_agents-0.1.1 → daita_agents-0.1.2}/README.md +0 -0
  10. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/__init__.py +0 -0
  11. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/agents/__init__.py +0 -0
  12. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/__init__.py +0 -0
  13. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/__main__.py +0 -0
  14. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/ascii_art.py +0 -0
  15. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/__init__.py +0 -0
  16. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/create.py +0 -0
  17. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/deploy.py +0 -0
  18. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/deployments.py +0 -0
  19. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/import_detector.py +0 -0
  20. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/init.py +0 -0
  21. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/logs.py +0 -0
  22. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/managed_deploy.py +0 -0
  23. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/run.py +0 -0
  24. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/status.py +0 -0
  25. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/test.py +0 -0
  26. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/core/webhooks.py +0 -0
  27. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/main.py +0 -0
  28. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/cli/utils.py +0 -0
  29. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/config/__init__.py +0 -0
  30. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/config/base.py +0 -0
  31. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/config/settings.py +0 -0
  32. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/__init__.py +0 -0
  33. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/decision_tracing.py +0 -0
  34. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/exceptions.py +0 -0
  35. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/focus.py +0 -0
  36. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/interfaces.py +0 -0
  37. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/plugin_tracing.py +0 -0
  38. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/reliability.py +0 -0
  39. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/scaling.py +0 -0
  40. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/tools.py +0 -0
  41. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/core/tracing.py +0 -0
  42. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/display/__init__.py +0 -0
  43. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/display/console.py +0 -0
  44. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/execution/__init__.py +0 -0
  45. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/execution/client.py +0 -0
  46. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/execution/exceptions.py +0 -0
  47. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/execution/models.py +0 -0
  48. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/__init__.py +0 -0
  49. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/anthropic.py +0 -0
  50. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/base.py +0 -0
  51. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/factory.py +0 -0
  52. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/gemini.py +0 -0
  53. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/grok.py +0 -0
  54. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/mock.py +0 -0
  55. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/llm/openai.py +0 -0
  56. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/__init__.py +0 -0
  57. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/base.py +0 -0
  58. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/base_db.py +0 -0
  59. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/elasticsearch.py +0 -0
  60. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/mcp.py +0 -0
  61. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/mongodb.py +0 -0
  62. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/mysql.py +0 -0
  63. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/postgresql.py +0 -0
  64. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/redis_messaging.py +0 -0
  65. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/rest.py +0 -0
  66. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/s3.py +0 -0
  67. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/plugins/slack.py +0 -0
  68. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita/utils/__init__.py +0 -0
  69. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita_agents.egg-info/SOURCES.txt +0 -0
  70. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita_agents.egg-info/dependency_links.txt +0 -0
  71. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita_agents.egg-info/entry_points.txt +0 -0
  72. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita_agents.egg-info/requires.txt +0 -0
  73. {daita_agents-0.1.1 → daita_agents-0.1.2}/daita_agents.egg-info/top_level.txt +0 -0
  74. {daita_agents-0.1.1 → daita_agents-0.1.2}/setup.cfg +0 -0
  75. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_agent_tools.py +0 -0
  76. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_api_integration.py +0 -0
  77. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_cli.py +0 -0
  78. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_config.py +0 -0
  79. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_configuration.py +0 -0
  80. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_focus_system.py +0 -0
  81. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_llm_providers.py +0 -0
  82. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_mongodb_plugin.py +0 -0
  83. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_mysql_plugin.py +0 -0
  84. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_phase3_strengthening.py +0 -0
  85. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_plugin_system.py +0 -0
  86. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_postgres_plugin.py +0 -0
  87. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_postgresql_direct_usage.py +0 -0
  88. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_postgresql_plugin_tools.py +0 -0
  89. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_relay_workflow_fixes.py +0 -0
  90. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_rest_plugin.py +0 -0
  91. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_s3_critical.py +0 -0
  92. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_s3_improvements.py +0 -0
  93. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_substrate_agent.py +0 -0
  94. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_substrate_agent_no_mcp.py +0 -0
  95. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_substrate_agent_with_tools.py +0 -0
  96. {daita_agents-0.1.1 → daita_agents-0.1.2}/tests/test_workflow.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: daita-agents
3
- Version: 0.1.1
3
+ Version: 0.1.2
4
4
  Summary: Daita Agents - Data focused AI agent framework with free local use and premium hosted enterprise features
5
5
  Author-email: Daita <support@daita-tech.io>
6
6
  License: Proprietary
@@ -207,6 +207,9 @@ class BaseAgent(Agent):
207
207
  context: Dict[str, Any]
208
208
  ) -> Dict[str, Any]:
209
209
  """Process task with full reliability features."""
210
+ # Track processing time
211
+ start_time = time.time()
212
+
210
213
  # Check backpressure first
211
214
  if self.backpressure_controller and not await self.backpressure_controller.acquire_processing_slot():
212
215
  raise BackpressureError(
@@ -214,7 +217,7 @@ class BaseAgent(Agent):
214
217
  agent_id=self.agent_id,
215
218
  queue_size=self.backpressure_controller.task_queue.qsize()
216
219
  )
217
-
220
+
218
221
  # Create task in task manager
219
222
  task_id = None
220
223
  if self.task_manager:
@@ -227,7 +230,7 @@ class BaseAgent(Agent):
227
230
  context['task_id'] = task_id
228
231
  # Update task status to running
229
232
  await self.task_manager.update_status(task_id, TaskStatus.RUNNING)
230
-
233
+
231
234
  try:
232
235
  # Automatically trace the entire operation
233
236
  async with self.trace_manager.span(
@@ -241,17 +244,22 @@ class BaseAgent(Agent):
241
244
  retry_enabled=str(self.config.retry_enabled),
242
245
  reliability_enabled="true"
243
246
  ) as span_id:
244
-
247
+
245
248
  # Execute with or without retry logic
246
249
  if self.config.retry_enabled:
247
250
  result = await self._process_with_retry(span_id, task, data, context)
248
251
  else:
249
252
  result = await self._process_fail_fast(span_id, task, data, context)
250
-
253
+
254
+ # Add processing time to result
255
+ processing_time_ms = (time.time() - start_time) * 1000
256
+ if isinstance(result, dict):
257
+ result['processing_time_ms'] = processing_time_ms
258
+
251
259
  # Update task status to completed
252
260
  if task_id and self.task_manager:
253
261
  await self.task_manager.update_status(task_id, TaskStatus.COMPLETED)
254
-
262
+
255
263
  return result
256
264
 
257
265
  except Exception as e:
@@ -272,6 +280,9 @@ class BaseAgent(Agent):
272
280
  context: Dict[str, Any]
273
281
  ) -> Dict[str, Any]:
274
282
  """Process task without reliability features (original behavior)."""
283
+ # Track processing time
284
+ start_time = time.time()
285
+
275
286
  # Automatically trace the entire operation
276
287
  async with self.trace_manager.span(
277
288
  operation_name=f"agent_process_{task}",
@@ -283,13 +294,18 @@ class BaseAgent(Agent):
283
294
  retry_enabled=str(self.config.retry_enabled),
284
295
  reliability_enabled="false"
285
296
  ) as span_id:
286
-
297
+
287
298
  # Execute with or without retry logic
288
299
  if self.config.retry_enabled:
289
300
  result = await self._process_with_retry(span_id, task, data, context)
290
301
  else:
291
302
  result = await self._process_fail_fast(span_id, task, data, context)
292
-
303
+
304
+ # Add processing time to result
305
+ processing_time_ms = (time.time() - start_time) * 1000
306
+ if isinstance(result, dict):
307
+ result['processing_time_ms'] = processing_time_ms
308
+
293
309
  return result
294
310
 
295
311
  async def _process_with_retry(
@@ -442,15 +442,20 @@ class SubstrateAgent(BaseAgent):
442
442
  return response
443
443
 
444
444
  def _register_builtin_handlers(self):
445
- """Register built-in handlers for common tasks."""
446
- self.handlers.update({
445
+ """Register built-in handlers for common tasks (without overwriting custom handlers)."""
446
+ builtin_handlers = {
447
447
  'analyze': self._handle_analyze,
448
448
  'transform': self._handle_transform,
449
449
  'process_data': self._handle_process_data,
450
450
  'custom': self._handle_custom,
451
451
  'relay_message': self._handle_relay_message,
452
452
  'llm_query': self._handle_llm_query
453
- })
453
+ }
454
+
455
+ # Only add built-in handlers if custom handler doesn't already exist
456
+ for task, handler in builtin_handlers.items():
457
+ if task not in self.handlers:
458
+ self.handlers[task] = handler
454
459
 
455
460
  async def _process_once(
456
461
  self,
@@ -489,7 +494,7 @@ class SubstrateAgent(BaseAgent):
489
494
  else:
490
495
  # No handler and no LLM available - use default behavior
491
496
  return self._handle_default(data, context, self)
492
-
497
+
493
498
  # Execute the handler
494
499
  try:
495
500
  if asyncio.iscoroutinefunction(handler):
@@ -59,10 +59,11 @@ class MessageStatus(str, Enum):
59
59
 
60
60
  @dataclass
61
61
  class ReliableMessage:
62
- """A message with acknowledgment tracking."""
62
+ """A message with acknowledgment tracking and metadata."""
63
63
  id: str
64
64
  channel: str
65
65
  data: Any
66
+ metadata: Dict[str, Any] = field(default_factory=dict)
66
67
  publisher: Optional[str] = None
67
68
  timestamp: float = field(default_factory=time.time)
68
69
  status: MessageStatus = MessageStatus.PENDING
@@ -101,7 +102,8 @@ class RelayManager:
101
102
  self.channels: Dict[str, deque] = {}
102
103
 
103
104
  # Subscribers: channel_name -> set of callbacks
104
- self.subscribers: Dict[str, weakref.WeakSet] = {}
105
+ # Using regular set instead of WeakSet to prevent premature garbage collection
106
+ self.subscribers: Dict[str, set] = {}
105
107
 
106
108
  # Per-channel locks to prevent race conditions between publish and subscribe
107
109
  self._channel_locks: Dict[str, asyncio.Lock] = {}
@@ -143,8 +145,60 @@ class RelayManager:
143
145
  """Ensure channel exists."""
144
146
  if channel not in self.channels:
145
147
  self.channels[channel] = deque(maxlen=self.max_messages_per_channel)
146
- self.subscribers[channel] = weakref.WeakSet()
148
+ self.subscribers[channel] = set() # Regular set to prevent garbage collection
147
149
  self._channel_locks[channel] = asyncio.Lock()
150
+
151
+ def _extract_metadata(self, agent_response: Dict[str, Any], publisher: Optional[str]) -> Dict[str, Any]:
152
+ """
153
+ Extract metadata from agent response for context propagation.
154
+
155
+ Automatically captures metrics, token usage, confidence scores, and other
156
+ context that downstream agents need to make intelligent decisions.
157
+ """
158
+ metadata = {
159
+ 'upstream_agent': publisher or agent_response.get('agent_name'),
160
+ 'upstream_agent_id': agent_response.get('agent_id'),
161
+ 'timestamp': agent_response.get('timestamp', time.time()),
162
+ }
163
+
164
+ # Extract processing metrics if available
165
+ if 'processing_time_ms' in agent_response:
166
+ metadata['processing_time_ms'] = agent_response['processing_time_ms']
167
+
168
+ # Extract token usage if available
169
+ if 'token_usage' in agent_response:
170
+ metadata['token_usage'] = agent_response['token_usage']
171
+
172
+ # Extract confidence score if available
173
+ if 'confidence_score' in agent_response:
174
+ metadata['confidence_score'] = agent_response['confidence_score']
175
+ elif 'confidence' in agent_response:
176
+ metadata['confidence_score'] = agent_response['confidence']
177
+
178
+ # Extract record count if available
179
+ if 'record_count' in agent_response:
180
+ metadata['record_count'] = agent_response['record_count']
181
+
182
+ # Extract error information if available
183
+ if 'error_count' in agent_response:
184
+ metadata['error_count'] = agent_response['error_count']
185
+
186
+ # Extract correlation ID for distributed tracing
187
+ if 'correlation_id' in agent_response:
188
+ metadata['correlation_id'] = agent_response['correlation_id']
189
+ elif 'context' in agent_response and isinstance(agent_response['context'], dict):
190
+ metadata['correlation_id'] = agent_response['context'].get('correlation_id', str(uuid.uuid4()))
191
+ else:
192
+ metadata['correlation_id'] = str(uuid.uuid4())
193
+
194
+ # Extract retry information if available
195
+ if 'retry_info' in agent_response:
196
+ metadata['retry_info'] = agent_response['retry_info']
197
+
198
+ # Extract status (success/error)
199
+ metadata['status'] = agent_response.get('status', 'unknown')
200
+
201
+ return metadata
148
202
 
149
203
  async def publish(
150
204
  self,
@@ -154,11 +208,11 @@ class RelayManager:
154
208
  require_ack: Optional[bool] = None
155
209
  ) -> Optional[str]:
156
210
  """
157
- Publish data to a channel.
211
+ Publish data to a channel with automatic metadata propagation.
158
212
 
159
213
  Args:
160
214
  channel: Channel name
161
- agent_response: Full agent response (we extract 'result' field)
215
+ agent_response: Full agent response (we extract 'result' field and metadata)
162
216
  publisher: Optional publisher identifier
163
217
  require_ack: Whether this message requires acknowledgment (overrides global setting)
164
218
 
@@ -175,29 +229,33 @@ class RelayManager:
175
229
  logger.warning(f"No 'result' field found in agent response for channel '{channel}'")
176
230
  result_data = agent_response # Fallback to full response if no result field
177
231
 
232
+ # Extract metadata for context propagation
233
+ metadata = self._extract_metadata(agent_response, publisher)
234
+
178
235
  self._ensure_channel(channel)
179
236
 
180
237
  # Determine if we need reliability
181
238
  needs_reliability = require_ack if require_ack is not None else self.enable_reliability
182
239
 
183
240
  if needs_reliability:
184
- return await self._publish_reliable(channel, result_data, agent_response, publisher)
241
+ return await self._publish_reliable(channel, result_data, metadata, publisher)
185
242
  else:
186
- return await self._publish_fire_and_forget(channel, result_data, agent_response, publisher)
243
+ return await self._publish_fire_and_forget(channel, result_data, metadata, publisher)
187
244
 
188
245
  async def _publish_fire_and_forget(
189
246
  self,
190
247
  channel: str,
191
248
  result_data: Any,
192
- agent_response: Dict[str, Any],
249
+ metadata: Dict[str, Any],
193
250
  publisher: Optional[str]
194
251
  ) -> None:
195
- """Publish message without reliability features (original behavior)."""
252
+ """Publish message without reliability features with metadata propagation."""
196
253
  # Use per-channel lock to make publish atomic
197
254
  async with self._channel_locks[channel]:
198
- # Create message with result data only
255
+ # Create message with result data and metadata
199
256
  message = {
200
257
  'data': result_data,
258
+ 'metadata': metadata,
201
259
  'publisher': publisher,
202
260
  'timestamp': time.time()
203
261
  }
@@ -205,8 +263,8 @@ class RelayManager:
205
263
  # Store result message
206
264
  self.channels[channel].append(message)
207
265
 
208
- # Notify subscribers with just the result data (while holding lock)
209
- await self._notify_subscribers(channel, result_data)
266
+ # Notify subscribers with result data and metadata (while holding lock)
267
+ await self._notify_subscribers(channel, result_data, metadata)
210
268
 
211
269
  logger.debug(f"Published result to channel '{channel}' from {publisher}")
212
270
  return None
@@ -215,16 +273,17 @@ class RelayManager:
215
273
  self,
216
274
  channel: str,
217
275
  result_data: Any,
218
- agent_response: Dict[str, Any],
276
+ metadata: Dict[str, Any],
219
277
  publisher: Optional[str]
220
278
  ) -> str:
221
- """Publish message with reliability features."""
279
+ """Publish message with reliability features and metadata propagation."""
222
280
  # Create reliable message
223
281
  message_id = uuid.uuid4().hex
224
282
  reliable_message = ReliableMessage(
225
283
  id=message_id,
226
284
  channel=channel,
227
285
  data=result_data,
286
+ metadata=metadata,
228
287
  publisher=publisher,
229
288
  ack_timeout=self.default_ack_timeout
230
289
  )
@@ -234,10 +293,11 @@ class RelayManager:
234
293
 
235
294
  # Use per-channel lock to make publish atomic
236
295
  async with self._channel_locks[channel]:
237
- # Create message for channel storage
296
+ # Create message for channel storage with metadata
238
297
  message = {
239
298
  'id': message_id,
240
299
  'data': result_data,
300
+ 'metadata': metadata,
241
301
  'publisher': publisher,
242
302
  'timestamp': time.time(),
243
303
  'requires_ack': True
@@ -254,66 +314,73 @@ class RelayManager:
254
314
  self.message_timeouts[message_id] = timeout_task
255
315
 
256
316
  # Notify subscribers with message ID for acknowledgment (while holding lock)
257
- await self._notify_subscribers_reliable(channel, result_data, message_id)
317
+ await self._notify_subscribers_reliable(channel, result_data, metadata, message_id)
258
318
 
259
319
  logger.debug(f"Published reliable message {message_id} to channel '{channel}' from {publisher}")
260
320
  return message_id
261
321
 
262
- async def _notify_subscribers(self, channel: str, result_data: Any) -> None:
263
- """Notify all subscribers of a channel with result data only."""
322
+ async def _notify_subscribers(self, channel: str, result_data: Any, metadata: Dict[str, Any]) -> None:
323
+ """Notify all subscribers of a channel with result data and metadata."""
264
324
  if channel not in self.subscribers:
265
325
  return
266
-
326
+
267
327
  # Get snapshot of subscribers to avoid modification during iteration
268
328
  subscriber_list = list(self.subscribers[channel])
269
-
329
+
270
330
  if not subscriber_list:
271
331
  return
272
-
332
+
273
333
  # Notify all subscribers concurrently
274
334
  tasks = []
275
335
  for subscriber in subscriber_list:
276
- task = asyncio.create_task(self._call_subscriber(subscriber, result_data))
336
+ task = asyncio.create_task(self._call_subscriber(subscriber, result_data, metadata))
277
337
  task.add_done_callback(lambda t: t.exception() if not t.cancelled() else None)
278
338
  tasks.append(task)
279
-
339
+
280
340
  # Wait for all notifications to complete
281
341
  if tasks:
282
342
  await asyncio.gather(*tasks, return_exceptions=True)
283
343
 
284
- async def _notify_subscribers_reliable(self, channel: str, result_data: Any, message_id: str) -> None:
285
- """Notify all subscribers of a reliable message."""
344
+ async def _notify_subscribers_reliable(self, channel: str, result_data: Any, metadata: Dict[str, Any], message_id: str) -> None:
345
+ """Notify all subscribers of a reliable message with metadata."""
286
346
  if channel not in self.subscribers:
287
347
  return
288
-
348
+
289
349
  # Get snapshot of subscribers to avoid modification during iteration
290
350
  subscriber_list = list(self.subscribers[channel])
291
-
351
+
292
352
  if not subscriber_list:
293
353
  return
294
-
295
- # Notify all subscribers concurrently with message ID
354
+
355
+ # Notify all subscribers concurrently with message ID and metadata
296
356
  tasks = []
297
357
  for subscriber in subscriber_list:
298
358
  task = asyncio.create_task(
299
- self._call_subscriber_reliable(subscriber, result_data, message_id)
359
+ self._call_subscriber_reliable(subscriber, result_data, metadata, message_id)
300
360
  )
301
361
  task.add_done_callback(lambda t: t.exception() if not t.cancelled() else None)
302
362
  tasks.append(task)
303
-
363
+
304
364
  # Wait for all notifications to complete
305
365
  if tasks:
306
366
  await asyncio.gather(*tasks, return_exceptions=True)
307
367
 
308
- async def _call_subscriber_reliable(self, callback: Callable, result_data: Any, message_id: str) -> None:
309
- """Safely call a subscriber callback for reliable message."""
368
+ async def _call_subscriber_reliable(self, callback: Callable, result_data: Any, metadata: Dict[str, Any], message_id: str) -> None:
369
+ """Safely call a subscriber callback for reliable message with metadata."""
310
370
  try:
311
371
  if asyncio.iscoroutinefunction(callback):
312
- # Check if callback supports message_id parameter
372
+ # Check callback signature to determine what parameters to pass
313
373
  import inspect
314
374
  sig = inspect.signature(callback)
315
- if 'message_id' in sig.parameters:
375
+ params = list(sig.parameters.keys())
376
+
377
+ # Call with appropriate parameters based on signature
378
+ if 'message_id' in params and 'metadata' in params:
379
+ await callback(result_data, metadata=metadata, message_id=message_id)
380
+ elif 'message_id' in params:
316
381
  await callback(result_data, message_id=message_id)
382
+ elif 'metadata' in params:
383
+ await callback(result_data, metadata=metadata)
317
384
  else:
318
385
  await callback(result_data)
319
386
  else:
@@ -325,11 +392,20 @@ class RelayManager:
325
392
  # NACK the message on callback error
326
393
  await self.nack_message(message_id, str(e))
327
394
 
328
- async def _call_subscriber(self, callback: Callable, result_data: Any) -> None:
329
- """Safely call a subscriber callback with result data."""
395
+ async def _call_subscriber(self, callback: Callable, result_data: Any, metadata: Dict[str, Any]) -> None:
396
+ """Safely call a subscriber callback with result data and metadata."""
330
397
  try:
331
398
  if asyncio.iscoroutinefunction(callback):
332
- await callback(result_data)
399
+ # Check callback signature to determine what parameters to pass
400
+ import inspect
401
+ sig = inspect.signature(callback)
402
+ params = list(sig.parameters.keys())
403
+
404
+ # Call with metadata if callback accepts it
405
+ if 'metadata' in params:
406
+ await callback(result_data, metadata=metadata)
407
+ else:
408
+ await callback(result_data)
333
409
  else:
334
410
  # Run sync callback in thread pool
335
411
  loop = asyncio.get_event_loop()
@@ -606,23 +682,24 @@ class RelayManager:
606
682
  retry_message = {
607
683
  'id': message.id,
608
684
  'data': message.data,
685
+ 'metadata': message.metadata,
609
686
  'publisher': message.publisher,
610
687
  'timestamp': message.timestamp,
611
688
  'requires_ack': True
612
689
  }
613
-
690
+
614
691
  # Add to channel
615
692
  self.channels[message.channel].append(retry_message)
616
-
693
+
617
694
  # Set up new timeout task for the retry
618
695
  timeout_task = asyncio.create_task(
619
696
  self._handle_message_timeout(message.id, message.ack_timeout)
620
697
  )
621
698
  timeout_task.add_done_callback(lambda t: t.exception() if not t.cancelled() else None)
622
699
  self.message_timeouts[message.id] = timeout_task
623
-
700
+
624
701
  # Notify subscribers again
625
- await self._notify_subscribers_reliable(message.channel, message.data, message.id)
702
+ await self._notify_subscribers_reliable(message.channel, message.data, message.metadata, message.id)
626
703
 
627
704
  # Clean up retry task reference
628
705
  self.message_timeouts.pop(f"{message.id}_retry", None)
@@ -499,9 +499,9 @@ class Workflow:
499
499
  raise WorkflowError(f"Failed to set up connection {connection}: {str(e)}")
500
500
 
501
501
  def _create_traced_callback(self, connection: Connection):
502
- """Create a callback that automatically traces workflow communication."""
503
- async def traced_callback(data: Any):
504
- """Callback that processes relay data and automatically traces communication."""
502
+ """Create a callback that automatically traces workflow communication and propagates metadata."""
503
+ async def traced_callback(data: Any, metadata: Optional[Dict[str, Any]] = None):
504
+ """Callback that processes relay data with automatic metadata propagation."""
505
505
  try:
506
506
  # Automatically trace the workflow communication
507
507
  await self._trace_workflow_communication(
@@ -511,28 +511,31 @@ class Workflow:
511
511
  data=data,
512
512
  success=True
513
513
  )
514
-
514
+
515
+ # Build enriched context with metadata propagation
516
+ enriched_context = {
517
+ 'source_agent': connection.from_agent,
518
+ 'channel': connection.channel,
519
+ 'workflow': self.name
520
+ }
521
+
522
+ # Add all upstream metadata to context
523
+ if metadata:
524
+ enriched_context.update(metadata)
525
+
515
526
  # Process the data with the destination agent or agent pool
516
527
  if connection.to_agent in self.agents:
517
528
  # Single agent
518
529
  dest_agent = self.agents[connection.to_agent]
519
530
  if hasattr(dest_agent, 'process'):
520
- await dest_agent.process(connection.task, data, {
521
- 'source_agent': connection.from_agent,
522
- 'channel': connection.channel,
523
- 'workflow': self.name
524
- })
531
+ await dest_agent.process(connection.task, data, enriched_context)
525
532
  else:
526
533
  logger.warning(f"Agent '{connection.to_agent}' has no process method")
527
534
 
528
535
  elif connection.to_agent in self.agent_pools:
529
- # Agent pool - submit task to pool
536
+ # Agent pool - submit task to pool with enriched context
530
537
  pool = self.agent_pools[connection.to_agent]
531
- await pool.submit_task(connection.task, data, {
532
- 'source_agent': connection.from_agent,
533
- 'channel': connection.channel,
534
- 'workflow': self.name
535
- })
538
+ await pool.submit_task(connection.task, data, enriched_context)
536
539
 
537
540
  else:
538
541
  logger.error(f"Destination '{connection.to_agent}' not found in agents or agent pools")
@@ -579,9 +582,9 @@ class Workflow:
579
582
  logger.warning(f"Failed to configure reliability for agent '{agent_name}': {e}")
580
583
 
581
584
  def _create_reliable_callback(self, connection: Connection):
582
- """Create a callback with reliability features enabled."""
583
- async def reliable_callback(data: Any, message_id: Optional[str] = None):
584
- """Callback that processes relay data with reliability features."""
585
+ """Create a callback with reliability features and metadata propagation."""
586
+ async def reliable_callback(data: Any, metadata: Optional[Dict[str, Any]] = None, message_id: Optional[str] = None):
587
+ """Callback that processes relay data with reliability features and metadata."""
585
588
  start_time = time.time()
586
589
 
587
590
  # Deduplication check (only if message_id provided)
@@ -607,16 +610,23 @@ class Workflow:
607
610
  message_id=message_id
608
611
  )
609
612
 
613
+ # Build enriched context with metadata propagation
614
+ enriched_context = {
615
+ 'source_agent': connection.from_agent,
616
+ 'channel': connection.channel,
617
+ 'workflow': self.name,
618
+ 'message_id': message_id,
619
+ 'reliability_enabled': True
620
+ }
621
+
622
+ # Add all upstream metadata to context
623
+ if metadata:
624
+ enriched_context.update(metadata)
625
+
610
626
  # Process the data with the destination agent
611
627
  dest_agent = self.agents[connection.to_agent]
612
628
  if hasattr(dest_agent, 'process'):
613
- context = {
614
- 'source_agent': connection.from_agent,
615
- 'channel': connection.channel,
616
- 'workflow': self.name,
617
- 'message_id': message_id,
618
- 'reliability_enabled': True
619
- }
629
+ context = enriched_context
620
630
 
621
631
  # Handle backpressure if enabled
622
632
  if (self.reliability_config and
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: daita-agents
3
- Version: 0.1.1
3
+ Version: 0.1.2
4
4
  Summary: Daita Agents - Data focused AI agent framework with free local use and premium hosted enterprise features
5
5
  Author-email: Daita <support@daita-tech.io>
6
6
  License: Proprietary
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "daita-agents"
7
- version = "0.1.1"
7
+ version = "0.1.2"
8
8
  description = "Daita Agents - Data focused AI agent framework with free local use and premium hosted enterprise features"
9
9
  readme = "README.md"
10
10
  authors = [{name = "Daita", email = "support@daita-tech.io"}]
File without changes
File without changes
File without changes