empathy-framework 5.0.3-py3-none-any.whl → 5.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. {empathy_framework-5.0.3.dist-info → empathy_framework-5.1.1.dist-info}/METADATA +259 -142
  2. {empathy_framework-5.0.3.dist-info → empathy_framework-5.1.1.dist-info}/RECORD +58 -28
  3. empathy_framework-5.1.1.dist-info/licenses/LICENSE +201 -0
  4. empathy_framework-5.1.1.dist-info/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +101 -0
  5. empathy_os/__init__.py +1 -1
  6. empathy_os/cli/commands/batch.py +5 -5
  7. empathy_os/cli/commands/routing.py +1 -1
  8. empathy_os/cli/commands/workflow.py +2 -1
  9. empathy_os/cli/parsers/cache 2.py +65 -0
  10. empathy_os/cli_minimal.py +3 -3
  11. empathy_os/cli_router 2.py +416 -0
  12. empathy_os/cli_router.py +12 -0
  13. empathy_os/dashboard/__init__.py +1 -2
  14. empathy_os/dashboard/app 2.py +512 -0
  15. empathy_os/dashboard/app.py +1 -1
  16. empathy_os/dashboard/simple_server 2.py +403 -0
  17. empathy_os/dashboard/standalone_server 2.py +536 -0
  18. empathy_os/memory/types 2.py +441 -0
  19. empathy_os/meta_workflows/intent_detector.py +71 -0
  20. empathy_os/models/__init__.py +19 -0
  21. empathy_os/models/adaptive_routing 2.py +437 -0
  22. empathy_os/models/auth_cli.py +444 -0
  23. empathy_os/models/auth_strategy.py +450 -0
  24. empathy_os/project_index/scanner_parallel 2.py +291 -0
  25. empathy_os/telemetry/agent_coordination 2.py +478 -0
  26. empathy_os/telemetry/agent_coordination.py +3 -3
  27. empathy_os/telemetry/agent_tracking 2.py +350 -0
  28. empathy_os/telemetry/agent_tracking.py +1 -2
  29. empathy_os/telemetry/approval_gates 2.py +563 -0
  30. empathy_os/telemetry/event_streaming 2.py +405 -0
  31. empathy_os/telemetry/event_streaming.py +3 -3
  32. empathy_os/telemetry/feedback_loop 2.py +557 -0
  33. empathy_os/telemetry/feedback_loop.py +1 -1
  34. empathy_os/vscode_bridge 2.py +173 -0
  35. empathy_os/workflows/__init__.py +8 -0
  36. empathy_os/workflows/autonomous_test_gen.py +569 -0
  37. empathy_os/workflows/bug_predict.py +45 -0
  38. empathy_os/workflows/code_review.py +92 -22
  39. empathy_os/workflows/document_gen.py +594 -62
  40. empathy_os/workflows/llm_base.py +363 -0
  41. empathy_os/workflows/perf_audit.py +69 -0
  42. empathy_os/workflows/progressive/README 2.md +454 -0
  43. empathy_os/workflows/progressive/__init__ 2.py +92 -0
  44. empathy_os/workflows/progressive/cli 2.py +242 -0
  45. empathy_os/workflows/progressive/core 2.py +488 -0
  46. empathy_os/workflows/progressive/orchestrator 2.py +701 -0
  47. empathy_os/workflows/progressive/reports 2.py +528 -0
  48. empathy_os/workflows/progressive/telemetry 2.py +280 -0
  49. empathy_os/workflows/progressive/test_gen 2.py +514 -0
  50. empathy_os/workflows/progressive/workflow 2.py +628 -0
  51. empathy_os/workflows/release_prep.py +54 -0
  52. empathy_os/workflows/security_audit.py +154 -79
  53. empathy_os/workflows/test_gen.py +60 -0
  54. empathy_os/workflows/test_gen_behavioral.py +477 -0
  55. empathy_os/workflows/test_gen_parallel.py +341 -0
  56. empathy_framework-5.0.3.dist-info/licenses/LICENSE +0 -139
  57. {empathy_framework-5.0.3.dist-info → empathy_framework-5.1.1.dist-info}/WHEEL +0 -0
  58. {empathy_framework-5.0.3.dist-info → empathy_framework-5.1.1.dist-info}/entry_points.txt +0 -0
  59. {empathy_framework-5.0.3.dist-info → empathy_framework-5.1.1.dist-info}/top_level.txt +0 -0
empathy_os/telemetry/event_streaming 2.py
@@ -0,0 +1,405 @@
+"""Real-Time Event Streaming using Redis Streams.
+
+Pattern 4 from Agent Coordination Architecture - Publish agent events
+to Redis Streams for real-time monitoring and WebSocket consumption.
+
+Event types:
+- agent_heartbeat: Agent liveness updates
+- coordination_signal: Inter-agent coordination messages
+- workflow_progress: Workflow stage progress
+- agent_error: Agent failures and errors
+
+Usage:
+    # Publish events
+    streamer = EventStreamer()
+    streamer.publish_event(
+        event_type="agent_heartbeat",
+        data={"agent_id": "worker-1", "status": "running", "progress": 0.5}
+    )
+
+    # Consume events (blocking)
+    for event in streamer.consume_events(event_types=["agent_heartbeat"]):
+        print(f"Received: {event}")
+
+    # Get recent events (non-blocking)
+    recent = streamer.get_recent_events(event_type="agent_heartbeat", count=100)
+
+Copyright 2025 Smart-AI-Memory
+Licensed under Fair Source License 0.9
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+import time
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Any, Iterator
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class StreamEvent:
+    """Event published to Redis Stream."""
+
+    event_id: str  # Redis stream entry ID (e.g., "1706356800000-0")
+    event_type: str  # "agent_heartbeat", "coordination_signal", etc.
+    timestamp: datetime
+    data: dict[str, Any]
+    source: str = "empathy_os"  # Source system
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "event_id": self.event_id,
+            "event_type": self.event_type,
+            "timestamp": self.timestamp.isoformat() if isinstance(self.timestamp, datetime) else self.timestamp,
+            "data": self.data,
+            "source": self.source,
+        }
+
+    @classmethod
+    def from_redis_entry(cls, event_id: str, entry_data: dict[bytes, bytes]) -> StreamEvent:
+        """Create from Redis stream entry.
+
+        Args:
+            event_id: Redis stream entry ID
+            entry_data: Raw entry data from Redis (bytes dict)
+
+        Returns:
+            StreamEvent instance
+        """
+        # Decode bytes to strings
+        decoded = {k.decode("utf-8"): v.decode("utf-8") for k, v in entry_data.items()}
+
+        # Parse timestamp
+        timestamp_str = decoded.get("timestamp", "")
+        try:
+            timestamp = datetime.fromisoformat(timestamp_str)
+        except (ValueError, AttributeError):
+            timestamp = datetime.utcnow()
+
+        # Parse data field (JSON)
+        data_str = decoded.get("data", "{}")
+        try:
+            data = json.loads(data_str)
+        except json.JSONDecodeError:
+            data = {}
+
+        return cls(
+            event_id=event_id,
+            event_type=decoded.get("event_type", "unknown"),
+            timestamp=timestamp,
+            data=data,
+            source=decoded.get("source", "empathy_os"),
+        )
+
+
+class EventStreamer:
+    """Real-time event streaming using Redis Streams.
+
+    Publishes events to Redis Streams and provides methods for consuming
+    events via polling or blocking reads.
+
+    Stream naming: empathy:events:{event_type}
+    Examples:
+    - empathy:events:agent_heartbeat
+    - empathy:events:coordination_signal
+    - empathy:events:workflow_progress
+    """
+
+    STREAM_PREFIX = "empathy:events:"
+    MAX_STREAM_LENGTH = 10000  # Trim streams to last 10K events
+    DEFAULT_BLOCK_MS = 5000  # 5 seconds blocking read timeout
+
+    def __init__(self, memory=None):
+        """Initialize event streamer.
+
+        Args:
+            memory: Memory backend with Redis connection
+        """
+        self.memory = memory
+
+        if self.memory is None:
+            try:
+                from empathy_os.telemetry import UsageTracker
+
+                tracker = UsageTracker.get_instance()
+                if hasattr(tracker, "_memory"):
+                    self.memory = tracker._memory
+            except (ImportError, AttributeError):
+                pass
+
+        if self.memory is None:
+            logger.warning("No memory backend available for event streaming")
+
+    def _get_stream_key(self, event_type: str) -> str:
+        """Get Redis stream key for an event type.
+
+        Args:
+            event_type: Type of event
+
+        Returns:
+            Stream key (e.g., "empathy:events:agent_heartbeat")
+        """
+        return f"{self.STREAM_PREFIX}{event_type}"
+
+    def publish_event(
+        self,
+        event_type: str,
+        data: dict[str, Any],
+        source: str = "empathy_os",
+    ) -> str:
+        """Publish an event to Redis Stream.
+
+        Args:
+            event_type: Type of event (e.g., "agent_heartbeat", "coordination_signal")
+            data: Event payload data
+            source: Source system (default "empathy_os")
+
+        Returns:
+            Event ID (Redis stream entry ID) if successful, empty string otherwise
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            logger.debug("Cannot publish event: no Redis backend")
+            return ""
+
+        stream_key = self._get_stream_key(event_type)
+
+        # Prepare entry data
+        entry = {
+            "event_type": event_type,
+            "timestamp": datetime.utcnow().isoformat(),
+            "data": json.dumps(data),
+            "source": source,
+        }
+
+        try:
+            # Add to stream with automatic trimming (MAXLEN)
+            event_id = self.memory._redis.xadd(
+                stream_key,
+                entry,
+                maxlen=self.MAX_STREAM_LENGTH,
+                approximate=True,  # Use ~ for performance
+            )
+
+            # Decode event_id if bytes
+            if isinstance(event_id, bytes):
+                event_id = event_id.decode("utf-8")
+
+            logger.debug(f"Published event {event_type}: {event_id}")
+            return event_id
+
+        except Exception as e:
+            logger.error(f"Failed to publish event {event_type}: {e}")
+            return ""
+
+    def consume_events(
+        self,
+        event_types: list[str] | None = None,
+        block_ms: int | None = None,
+        count: int = 10,
+        start_id: str = "$",
+    ) -> Iterator[StreamEvent]:
+        """Consume events from Redis Streams (blocking iterator).
+
+        Args:
+            event_types: List of event types to consume (None = all types)
+            block_ms: Blocking timeout in milliseconds (None = DEFAULT_BLOCK_MS)
+            count: Number of events to read per batch
+            start_id: Stream position to start from ("$" = new events only, "0" = all events)
+
+        Yields:
+            StreamEvent instances as they arrive
+
+        Example:
+            >>> streamer = EventStreamer()
+            >>> for event in streamer.consume_events(event_types=["agent_heartbeat"]):
+            ...     print(f"Agent {event.data['agent_id']} status: {event.data['status']}")
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            logger.warning("Cannot consume events: no Redis backend")
+            return
+
+        block_ms = block_ms if block_ms is not None else self.DEFAULT_BLOCK_MS
+
+        # Determine streams to read
+        if event_types:
+            streams = {self._get_stream_key(et): start_id for et in event_types}
+        else:
+            # Subscribe to all event streams (expensive - requires KEYS scan)
+            all_streams = self.memory._redis.keys(f"{self.STREAM_PREFIX}*")
+            streams = {s.decode("utf-8") if isinstance(s, bytes) else s: start_id for s in all_streams}
+
+        if not streams:
+            logger.debug("No streams to consume")
+            return
+
+        # Track last IDs for each stream
+        last_ids = streams.copy()
+
+        try:
+            while True:
+                # XREAD: blocking read from multiple streams
+                results = self.memory._redis.xread(
+                    last_ids,
+                    count=count,
+                    block=block_ms,
+                )
+
+                if not results:
+                    # Timeout - no new events
+                    continue
+
+                # Process results
+                for stream_key, entries in results:
+                    # Decode stream key if bytes
+                    if isinstance(stream_key, bytes):
+                        stream_key = stream_key.decode("utf-8")
+
+                    for event_id, entry_data in entries:
+                        # Decode event_id if bytes
+                        if isinstance(event_id, bytes):
+                            event_id = event_id.decode("utf-8")
+
+                        # Parse event
+                        event = StreamEvent.from_redis_entry(event_id, entry_data)
+                        yield event
+
+                        # Update last_id for this stream
+                        last_ids[stream_key] = event_id
+
+        except KeyboardInterrupt:
+            logger.info("Event consumption interrupted")
+        except Exception as e:
+            logger.error(f"Error consuming events: {e}")
+
+    def get_recent_events(
+        self,
+        event_type: str,
+        count: int = 100,
+        start_id: str = "-",
+        end_id: str = "+",
+    ) -> list[StreamEvent]:
+        """Get recent events from a stream (non-blocking).
+
+        Args:
+            event_type: Type of event to retrieve
+            count: Maximum number of events to return
+            start_id: Start position ("-" = oldest, specific ID = from that point)
+            end_id: End position ("+" = newest, specific ID = up to that point)
+
+        Returns:
+            List of recent events (newest first)
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            logger.debug("Cannot get recent events: no Redis backend")
+            return []
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            # XREVRANGE: get events in reverse chronological order
+            results = self.memory._redis.xrevrange(
+                stream_key,
+                max=end_id,
+                min=start_id,
+                count=count,
+            )
+
+            events = []
+            for event_id, entry_data in results:
+                # Decode event_id if bytes
+                if isinstance(event_id, bytes):
+                    event_id = event_id.decode("utf-8")
+
+                event = StreamEvent.from_redis_entry(event_id, entry_data)
+                events.append(event)
+
+            return events
+
+        except Exception as e:
+            logger.error(f"Failed to get recent events for {event_type}: {e}")
+            return []
+
+    def get_stream_info(self, event_type: str) -> dict[str, Any]:
+        """Get information about a stream.
+
+        Args:
+            event_type: Type of event stream
+
+        Returns:
+            Dictionary with stream info (length, first_entry, last_entry, etc.)
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            return {}
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            info = self.memory._redis.xinfo_stream(stream_key)
+
+            # Decode bytes keys/values
+            decoded_info = {}
+            for key, value in info.items():
+                if isinstance(key, bytes):
+                    key = key.decode("utf-8")
+                if isinstance(value, bytes):
+                    value = value.decode("utf-8")
+                decoded_info[key] = value
+
+            return decoded_info
+
+        except Exception as e:
+            logger.debug(f"Failed to get stream info for {event_type}: {e}")
+            return {}
+
+    def delete_stream(self, event_type: str) -> bool:
+        """Delete an event stream.
+
+        Args:
+            event_type: Type of event stream to delete
+
+        Returns:
+            True if deleted, False otherwise
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            return False
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            result = self.memory._redis.delete(stream_key)
+            return result > 0
+        except Exception as e:
+            logger.error(f"Failed to delete stream {event_type}: {e}")
+            return False
+
+    def trim_stream(self, event_type: str, max_length: int = 1000) -> int:
+        """Trim a stream to a maximum length.
+
+        Args:
+            event_type: Type of event stream
+            max_length: Maximum number of events to keep
+
+        Returns:
+            Number of events trimmed
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            return 0
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            # XTRIM: trim to approximate max length
+            trimmed = self.memory._redis.xtrim(
+                stream_key,
+                maxlen=max_length,
+                approximate=True,
+            )
+            return trimmed
+        except Exception as e:
+            logger.error(f"Failed to trim stream {event_type}: {e}")
+            return 0
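
The new module above is a thin wrapper over four Redis Stream commands: XADD (publish_event), XREAD (consume_events), XREVRANGE (get_recent_events), and XTRIM (trim_stream). It duck-types against any memory object that exposes a redis-py client as a `_redis` attribute. A minimal sketch of wiring it up, assuming redis-py is installed and substituting a hypothetical RedisMemory shim for the package's real memory backend; imports target the canonical event_streaming module, since the "event_streaming 2.py" copy is not importable because of the space in its filename:

import redis  # assumes the redis-py client is installed

from empathy_os.telemetry.event_streaming import EventStreamer


class RedisMemory:
    """Hypothetical shim: EventStreamer only requires a `_redis`
    attribute holding a redis-py client."""

    def __init__(self, url: str = "redis://localhost:6379/0") -> None:
        self._redis = redis.Redis.from_url(url)


streamer = EventStreamer(memory=RedisMemory())

# XADD with approximate MAXLEN trimming, as in publish_event() above.
streamer.publish_event(
    event_type="agent_heartbeat",
    data={"agent_id": "worker-1", "status": "running", "progress": 0.5},
)

# Non-blocking replay of the newest entries via XREVRANGE.
for event in streamer.get_recent_events("agent_heartbeat", count=10):
    print(event.to_dict())
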
empathy_os/telemetry/event_streaming.py
@@ -32,10 +32,10 @@ from __future__ import annotations
 
 import json
 import logging
-import time
-from dataclasses import dataclass, field
+from collections.abc import Iterator
+from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Iterator
+from typing import Any
 
 logger = logging.getLogger(__name__)
 
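
This hunk accounts for the full +3/-3 change to the canonical event_streaming.py: the unused `time` and `field` imports are dropped, and `Iterator` moves from `typing` to `collections.abc`, in line with the PEP 585 deprecation of the typing aliases. A hypothetical `countdown` function (not from the package) illustrating that the swap is behavior-preserving on Python 3.9+:

from __future__ import annotations

from collections.abc import Iterator


def countdown(n: int) -> Iterator[int]:
    # Since PEP 585, collections.abc.Iterator is subscriptable in
    # annotations, so the typing.Iterator alias adds nothing.
    while n > 0:
        yield n
        n -= 1


assert list(countdown(3)) == [3, 2, 1]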