daita-agents 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of daita-agents has been flagged as potentially problematic.

Files changed (69)
  1. daita/__init__.py +208 -0
  2. daita/agents/__init__.py +33 -0
  3. daita/agents/base.py +722 -0
  4. daita/agents/substrate.py +895 -0
  5. daita/cli/__init__.py +145 -0
  6. daita/cli/__main__.py +7 -0
  7. daita/cli/ascii_art.py +44 -0
  8. daita/cli/core/__init__.py +0 -0
  9. daita/cli/core/create.py +254 -0
  10. daita/cli/core/deploy.py +473 -0
  11. daita/cli/core/deployments.py +309 -0
  12. daita/cli/core/import_detector.py +219 -0
  13. daita/cli/core/init.py +382 -0
  14. daita/cli/core/logs.py +239 -0
  15. daita/cli/core/managed_deploy.py +709 -0
  16. daita/cli/core/run.py +648 -0
  17. daita/cli/core/status.py +421 -0
  18. daita/cli/core/test.py +239 -0
  19. daita/cli/core/webhooks.py +172 -0
  20. daita/cli/main.py +588 -0
  21. daita/cli/utils.py +541 -0
  22. daita/config/__init__.py +62 -0
  23. daita/config/base.py +159 -0
  24. daita/config/settings.py +184 -0
  25. daita/core/__init__.py +262 -0
  26. daita/core/decision_tracing.py +701 -0
  27. daita/core/exceptions.py +480 -0
  28. daita/core/focus.py +251 -0
  29. daita/core/interfaces.py +76 -0
  30. daita/core/plugin_tracing.py +550 -0
  31. daita/core/relay.py +695 -0
  32. daita/core/reliability.py +381 -0
  33. daita/core/scaling.py +444 -0
  34. daita/core/tools.py +402 -0
  35. daita/core/tracing.py +770 -0
  36. daita/core/workflow.py +1084 -0
  37. daita/display/__init__.py +1 -0
  38. daita/display/console.py +160 -0
  39. daita/execution/__init__.py +58 -0
  40. daita/execution/client.py +856 -0
  41. daita/execution/exceptions.py +92 -0
  42. daita/execution/models.py +317 -0
  43. daita/llm/__init__.py +60 -0
  44. daita/llm/anthropic.py +166 -0
  45. daita/llm/base.py +373 -0
  46. daita/llm/factory.py +101 -0
  47. daita/llm/gemini.py +152 -0
  48. daita/llm/grok.py +114 -0
  49. daita/llm/mock.py +135 -0
  50. daita/llm/openai.py +109 -0
  51. daita/plugins/__init__.py +141 -0
  52. daita/plugins/base.py +37 -0
  53. daita/plugins/base_db.py +167 -0
  54. daita/plugins/elasticsearch.py +844 -0
  55. daita/plugins/mcp.py +481 -0
  56. daita/plugins/mongodb.py +510 -0
  57. daita/plugins/mysql.py +351 -0
  58. daita/plugins/postgresql.py +331 -0
  59. daita/plugins/redis_messaging.py +500 -0
  60. daita/plugins/rest.py +529 -0
  61. daita/plugins/s3.py +761 -0
  62. daita/plugins/slack.py +729 -0
  63. daita/utils/__init__.py +18 -0
  64. daita_agents-0.1.0.dist-info/METADATA +350 -0
  65. daita_agents-0.1.0.dist-info/RECORD +69 -0
  66. daita_agents-0.1.0.dist-info/WHEEL +5 -0
  67. daita_agents-0.1.0.dist-info/entry_points.txt +2 -0
  68. daita_agents-0.1.0.dist-info/licenses/LICENSE +56 -0
  69. daita_agents-0.1.0.dist-info/top_level.txt +1 -0
daita/plugins/redis_messaging.py
@@ -0,0 +1,500 @@
+"""
+Redis Messaging Plugin for Daita Agents.
+
+Provides distributed messaging and persistent storage using Redis Streams and Pub/Sub.
+Solves single-process limitation and memory-bound storage while maintaining the
+existing RelayManager API.
+
+Features:
+- Redis Streams for message persistence and ordering
+- Redis Pub/Sub for real-time message delivery
+- Automatic TTL-based message cleanup
+- Connection pooling and error handling
+- Compatible with existing RelayManager interface
+
+Example:
+    ```python
+    from daita.plugins import redis_messaging
+    from daita.core.workflow import Workflow
+
+    # Create Redis messaging plugin
+    redis_msg = redis_messaging(url="redis://localhost:6379")
+
+    # Use with workflow
+    workflow = Workflow("Distributed Pipeline", messaging_plugin=redis_msg)
+    workflow.connect("agent_a", "data_channel", "agent_b")
+    ```
+"""
+
+import asyncio
+import json
+import logging
+import time
+import uuid
+from typing import Dict, Any, Optional, List, Callable, Union
+import weakref
+
+logger = logging.getLogger(__name__)
+
+class RedisMessagingPlugin:
+    """
+    Redis messaging plugin for distributed agent communication.
+
+    Uses Redis Streams for message persistence and Pub/Sub for real-time delivery.
+    Maintains compatibility with existing RelayManager interface while adding
+    distributed capabilities.
+    """
+
+    def __init__(
+        self,
+        url: str = "redis://localhost:6379",
+        max_connections: int = 10,
+        message_ttl: int = 86400,  # 24 hours
+        max_stream_length: int = 10000,
+        connection_timeout: int = 30,
+        **kwargs
+    ):
+        """
+        Initialize Redis messaging plugin.
+
+        Args:
+            url: Redis connection URL
+            max_connections: Maximum Redis connections in pool
+            message_ttl: Message TTL in seconds (24 hours default)
+            max_stream_length: Maximum messages per stream (10k default)
+            connection_timeout: Connection timeout in seconds
+            **kwargs: Additional Redis parameters
+        """
+        self.url = url
+        self.max_connections = max_connections
+        self.message_ttl = message_ttl
+        self.max_stream_length = max_stream_length
+        self.connection_timeout = connection_timeout
+        self.extra_params = kwargs
+
+        # Redis connections (lazy initialization)
+        self._redis_pool = None
+        self._pubsub_connections: Dict[str, Any] = {}
+
+        # Subscribers: channel_name -> weak set of callbacks
+        self._subscribers: Dict[str, weakref.WeakSet] = {}
+
+        # Running state
+        self._running = False
+
+        logger.debug(f"RedisMessagingPlugin initialized (url: {url})")
+
+    async def start(self) -> None:
+        """Start the Redis messaging plugin."""
+        if self._running:
+            return
+
+        try:
+            import redis.asyncio as redis
+        except ImportError:
+            raise ImportError(
+                "redis package required for RedisMessagingPlugin. "
+                "Install with: pip install redis"
+            )
+
+        # Create connection pool
+        self._redis_pool = redis.ConnectionPool.from_url(
+            self.url,
+            max_connections=self.max_connections,
+            socket_connect_timeout=self.connection_timeout,
+            socket_keepalive=True,
+            socket_keepalive_options={},
+            health_check_interval=30,
+            **self.extra_params
+        )
+
+        self._running = True
+        logger.info("RedisMessagingPlugin started")
+
+    async def stop(self) -> None:
+        """Stop the Redis messaging plugin and clean up connections."""
+        if not self._running:
+            return
+
+        self._running = False
+
+        # Close all pubsub connections
+        for channel, pubsub in self._pubsub_connections.items():
+            try:
+                await pubsub.unsubscribe(f"pubsub:{channel}")
+                await pubsub.close()
+            except Exception as e:
+                logger.warning(f"Error closing pubsub connection for {channel}: {e}")
+
+        self._pubsub_connections.clear()
+
+        # Close connection pool
+        if self._redis_pool:
+            await self._redis_pool.disconnect()
+            self._redis_pool = None
+
+        logger.info("RedisMessagingPlugin stopped")
+
+    async def _get_redis(self):
+        """Get Redis client from pool."""
+        if not self._running:
+            await self.start()
+
+        import redis.asyncio as redis
+        return redis.Redis(connection_pool=self._redis_pool)
+
+    async def publish(self, channel: str, message: Dict[str, Any],
+                      publisher: Optional[str] = None) -> str:
+        """
+        Publish message to Redis stream and notify via pub/sub.
+
+        Args:
+            channel: Channel name
+            message: Message data
+            publisher: Optional publisher identifier
+
+        Returns:
+            Message ID
+        """
+        if not self._running:
+            await self.start()
+
+        redis_client = await self._get_redis()
+        message_id = uuid.uuid4().hex
+
+        try:
+            # Prepare message for storage (stream field values must be flat
+            # scalars, so the payload is JSON-encoded)
+            stored_message = {
+                "id": message_id,
+                "data": json.dumps(message),
+                "publisher": publisher or "",
+                "timestamp": time.time()
+            }
+
+            # Store in Redis Stream for persistence (Redis assigns the stream ID)
+            stream_key = f"stream:{channel}"
+            await redis_client.xadd(
+                stream_key,
+                stored_message,
+                maxlen=self.max_stream_length,
+                approximate=True
+            )
+
+            # Set TTL on stream
+            await redis_client.expire(stream_key, self.message_ttl)
+
+            # Notify via pub/sub for real-time delivery
+            pubsub_key = f"pubsub:{channel}"
+            notification = {
+                "message_id": message_id,
+                "data": message,
+                "publisher": publisher,
+                "timestamp": time.time()
+            }
+
+            await redis_client.publish(pubsub_key, json.dumps(notification))
+
+            logger.debug(f"Published message {message_id} to channel '{channel}'")
+            return message_id
+
+        except Exception as e:
+            logger.error(f"Error publishing message to Redis: {e}")
+            raise
+        finally:
+            await redis_client.close()
+
+    async def subscribe(self, channel: str, callback: Callable) -> None:
+        """
+        Subscribe to channel with callback.
+
+        Args:
+            channel: Channel name
+            callback: Callback function to receive messages
+        """
+        if not self._running:
+            await self.start()
+
+        # Add callback to subscribers
+        if channel not in self._subscribers:
+            self._subscribers[channel] = weakref.WeakSet()
+        self._subscribers[channel].add(callback)
+
+        # Start pubsub listener if not already running
+        if channel not in self._pubsub_connections:
+            await self._start_pubsub_listener(channel)
+
+        logger.debug(f"Subscribed to channel '{channel}'")
+
+    async def _start_pubsub_listener(self, channel: str) -> None:
+        """Start pub/sub listener for a channel."""
+        try:
+            redis_client = await self._get_redis()
+            pubsub = redis_client.pubsub()
+
+            await pubsub.subscribe(f"pubsub:{channel}")
+            self._pubsub_connections[channel] = pubsub
+
+            # Start listener task
+            task = asyncio.create_task(self._pubsub_message_handler(channel, pubsub))
+            task.add_done_callback(lambda t: t.exception() if not t.cancelled() else None)
+
+        except Exception as e:
+            logger.error(f"Error starting pubsub listener for {channel}: {e}")
+
+    async def _pubsub_message_handler(self, channel: str, pubsub) -> None:
+        """Handle messages from pub/sub subscription."""
+        try:
+            async for message in pubsub.listen():
+                if message['type'] == 'message':
+                    try:
+                        # Parse message data
+                        data = json.loads(message['data'].decode())
+                        message_data = data['data']
+
+                        # Notify all subscribers
+                        await self._notify_subscribers(channel, message_data)
+
+                    except Exception as e:
+                        logger.error(f"Error processing pubsub message: {e}")
+
+        except asyncio.CancelledError:
+            # Clean shutdown
+            pass
+        except Exception as e:
+            logger.error(f"Error in pubsub message handler for {channel}: {e}")
+        finally:
+            # Cleanup
+            try:
+                await pubsub.unsubscribe(f"pubsub:{channel}")
+                await pubsub.close()
+            except Exception:
+                pass
+
+            self._pubsub_connections.pop(channel, None)
+
+    async def _notify_subscribers(self, channel: str, message_data: Any) -> None:
+        """Notify all subscribers of a channel."""
+        if channel not in self._subscribers:
+            return
+
+        # Get snapshot of subscribers
+        subscriber_list = list(self._subscribers[channel])
+
+        if not subscriber_list:
+            return
+
+        # Notify all subscribers concurrently
+        tasks = []
+        for subscriber in subscriber_list:
+            task = asyncio.create_task(self._call_subscriber(subscriber, message_data))
+            task.add_done_callback(lambda t: t.exception() if not t.cancelled() else None)
+            tasks.append(task)
+
+        # Wait for all notifications
+        if tasks:
+            await asyncio.gather(*tasks, return_exceptions=True)
+
+    async def _call_subscriber(self, callback: Callable, message_data: Any) -> None:
+        """Safely call a subscriber callback."""
+        try:
+            if asyncio.iscoroutinefunction(callback):
+                await callback(message_data)
+            else:
+                # Run sync callback in thread pool
+                loop = asyncio.get_running_loop()
+                await loop.run_in_executor(None, callback, message_data)
+        except Exception as e:
+            logger.error(f"Error in subscriber callback: {str(e)}")
+
+    async def get_latest(self, channel: str, count: int = 1) -> List[Any]:
+        """
+        Get latest messages from Redis stream.
+
+        Args:
+            channel: Channel name
+            count: Number of messages to retrieve
+
+        Returns:
+            List of message data (newest first)
+        """
+        if not self._running:
+            await self.start()
+
+        redis_client = await self._get_redis()
+
+        try:
+            stream_key = f"stream:{channel}"
+
+            # Get latest messages from stream (XREVRANGE for newest first)
+            messages = await redis_client.xrevrange(stream_key, count=count)
+
+            # Extract message data
+            result = []
+            for message_id, fields in messages:
+                try:
+                    # Parse the stored message data
+                    data = fields.get(b'data', b'{}').decode()
+                    message_data = json.loads(data) if data != '{}' else {}
+                    result.append(message_data)
+                except Exception as e:
+                    logger.warning(f"Error parsing message {message_id}: {e}")
+
+            return result
+
+        except Exception as e:
+            logger.error(f"Error retrieving latest messages from Redis: {e}")
+            return []
+        finally:
+            await redis_client.close()
+
+    async def health_check(self) -> Dict[str, Any]:
+        """
+        Check Redis connection health.
+
+        Returns:
+            Health status information
+        """
+        try:
+            if not self._running:
+                return {"status": "stopped", "connected": False}
+
+            redis_client = await self._get_redis()
+
+            # Test connection with ping
+            start_time = time.time()
+            await redis_client.ping()
+            ping_time = (time.time() - start_time) * 1000  # ms
+
+            # Get Redis info
+            info = await redis_client.info()
+
+            await redis_client.close()
+
+            return {
+                "status": "healthy",
+                "connected": True,
+                "ping_time_ms": round(ping_time, 2),
+                "redis_version": info.get("redis_version", "unknown"),
+                "used_memory_human": info.get("used_memory_human", "unknown"),
+                "connected_clients": info.get("connected_clients", 0),
+                "total_commands_processed": info.get("total_commands_processed", 0)
+            }
+
+        except Exception as e:
+            return {
+                "status": "unhealthy",
+                "connected": False,
+                "error": str(e)
+            }
+
+    async def clear_channel(self, channel: str) -> bool:
+        """
+        Clear all messages from a channel.
+
+        Args:
+            channel: Channel name
+
+        Returns:
+            True if channel was cleared
+        """
+        if not self._running:
+            await self.start()
+
+        redis_client = await self._get_redis()
+
+        try:
+            stream_key = f"stream:{channel}"
+            deleted = await redis_client.delete(stream_key)
+
+            logger.debug(f"Cleared channel '{channel}' (deleted: {deleted})")
+            return deleted > 0
+
+        except Exception as e:
+            logger.error(f"Error clearing Redis channel {channel}: {e}")
+            return False
+        finally:
+            await redis_client.close()
+
+    async def get_stats(self) -> Dict[str, Any]:
+        """Get Redis messaging statistics."""
+        if not self._running:
+            return {"status": "stopped"}
+
+        try:
+            redis_client = await self._get_redis()
+            info = await redis_client.info()
+            await redis_client.close()
+
+            return {
+                "status": "running",
+                "redis_version": info.get("redis_version", "unknown"),
+                "used_memory": info.get("used_memory", 0),
+                "used_memory_human": info.get("used_memory_human", "0B"),
+                "connected_clients": info.get("connected_clients", 0),
+                "total_commands_processed": info.get("total_commands_processed", 0),
+                "pubsub_channels": len(self._pubsub_connections),
+                "subscriber_channels": len(self._subscribers)
+            }
+
+        except Exception as e:
+            return {
+                "status": "error",
+                "error": str(e)
+            }
+
+    # Context manager support
+    async def __aenter__(self) -> "RedisMessagingPlugin":
+        """Async context manager entry."""
+        await self.start()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
+        """Async context manager exit."""
+        await self.stop()
+
+# Factory function for easy instantiation
+def redis_messaging(
+    url: str = "redis://localhost:6379",
+    max_connections: int = 10,
+    message_ttl: int = 86400,
+    max_stream_length: int = 10000,
+    connection_timeout: int = 30,
+    **kwargs
+) -> RedisMessagingPlugin:
+    """
+    Create a Redis messaging plugin instance.
+
+    Args:
+        url: Redis connection URL (default: redis://localhost:6379)
+        max_connections: Maximum Redis connections in pool (default: 10)
+        message_ttl: Message TTL in seconds (default: 24 hours)
+        max_stream_length: Maximum messages per stream (default: 10k)
+        connection_timeout: Connection timeout in seconds (default: 30)
+        **kwargs: Additional Redis parameters
+
+    Returns:
+        RedisMessagingPlugin instance
+
+    Example:
+        ```python
+        # Basic usage
+        redis_msg = redis_messaging()
+
+        # Production configuration
+        redis_msg = redis_messaging(
+            url="redis://redis-cluster:6379",
+            max_connections=20,
+            message_ttl=7*24*3600,  # 7 days
+            max_stream_length=50000
+        )
+        ```
+    """
+    return RedisMessagingPlugin(
+        url=url,
+        max_connections=max_connections,
+        message_ttl=message_ttl,
+        max_stream_length=max_stream_length,
+        connection_timeout=connection_timeout,
+        **kwargs
+    )
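
For reviewers who want to see how the pieces above fit together end to end, the snippet below is a minimal sketch based only on the methods shown in this diff (`redis_messaging`, `subscribe`, `publish`, `get_latest`, `health_check`, and the async context manager). It assumes the `redis` package is installed, a Redis server is reachable at `redis://localhost:6379`, and the channel name and payload are illustrative.

```python
import asyncio

from daita.plugins.redis_messaging import redis_messaging


async def main() -> None:
    # Create the plugin; no connection is made until start()/publish().
    redis_msg = redis_messaging(url="redis://localhost:6379", message_ttl=3600)

    # Async callback invoked for each pub/sub notification on the channel.
    async def on_message(data):
        print("received:", data)

    async with redis_msg:  # __aenter__/__aexit__ call start()/stop()
        await redis_msg.subscribe("data_channel", on_message)

        # Appended to stream:data_channel and announced on pubsub:data_channel.
        message_id = await redis_msg.publish(
            "data_channel",
            {"task": "etl", "rows": 1024},
            publisher="agent_a",
        )
        print("published:", message_id)

        # Give the listener a moment to deliver, then read the persisted
        # copy back from the stream (newest first).
        await asyncio.sleep(0.1)
        print("latest from stream:", await redis_msg.get_latest("data_channel", count=5))

        # Basic connectivity probe.
        print("health:", await redis_msg.health_check())


if __name__ == "__main__":
    asyncio.run(main())
```

When auditing what this package writes to an existing Redis instance, note that the only keys it touches are `stream:<channel>` (a stream capped at `max_stream_length` with a `message_ttl` expiry) and `pubsub:<channel>` (the real-time notification channel).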