empathy-framework 5.0.1__py3-none-any.whl → 5.1.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/METADATA +311 -150
- {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/RECORD +60 -33
- empathy_framework-5.1.0.dist-info/licenses/LICENSE +201 -0
- empathy_framework-5.1.0.dist-info/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +101 -0
- empathy_llm_toolkit/providers.py +175 -35
- empathy_llm_toolkit/utils/tokens.py +150 -30
- empathy_os/__init__.py +1 -1
- empathy_os/cli/commands/batch.py +256 -0
- empathy_os/cli/commands/cache.py +248 -0
- empathy_os/cli/commands/inspect.py +1 -2
- empathy_os/cli/commands/metrics.py +1 -1
- empathy_os/cli/commands/routing.py +285 -0
- empathy_os/cli/commands/workflow.py +2 -1
- empathy_os/cli/parsers/__init__.py +6 -0
- empathy_os/cli/parsers/batch.py +118 -0
- empathy_os/cli/parsers/cache 2.py +65 -0
- empathy_os/cli/parsers/cache.py +65 -0
- empathy_os/cli/parsers/routing.py +110 -0
- empathy_os/cli_minimal.py +3 -3
- empathy_os/cli_router 2.py +416 -0
- empathy_os/dashboard/__init__.py +1 -2
- empathy_os/dashboard/app 2.py +512 -0
- empathy_os/dashboard/app.py +1 -1
- empathy_os/dashboard/simple_server 2.py +403 -0
- empathy_os/dashboard/standalone_server 2.py +536 -0
- empathy_os/dashboard/standalone_server.py +22 -11
- empathy_os/memory/types 2.py +441 -0
- empathy_os/metrics/collector.py +31 -0
- empathy_os/models/__init__.py +19 -0
- empathy_os/models/adaptive_routing 2.py +437 -0
- empathy_os/models/auth_cli.py +444 -0
- empathy_os/models/auth_strategy.py +450 -0
- empathy_os/models/token_estimator.py +21 -13
- empathy_os/project_index/scanner_parallel 2.py +291 -0
- empathy_os/telemetry/agent_coordination 2.py +478 -0
- empathy_os/telemetry/agent_coordination.py +14 -16
- empathy_os/telemetry/agent_tracking 2.py +350 -0
- empathy_os/telemetry/agent_tracking.py +18 -20
- empathy_os/telemetry/approval_gates 2.py +563 -0
- empathy_os/telemetry/approval_gates.py +27 -39
- empathy_os/telemetry/event_streaming 2.py +405 -0
- empathy_os/telemetry/event_streaming.py +22 -22
- empathy_os/telemetry/feedback_loop 2.py +557 -0
- empathy_os/telemetry/feedback_loop.py +14 -17
- empathy_os/workflows/__init__.py +8 -0
- empathy_os/workflows/autonomous_test_gen.py +569 -0
- empathy_os/workflows/batch_processing.py +56 -10
- empathy_os/workflows/bug_predict.py +45 -0
- empathy_os/workflows/code_review.py +92 -22
- empathy_os/workflows/document_gen.py +594 -62
- empathy_os/workflows/llm_base.py +363 -0
- empathy_os/workflows/perf_audit.py +69 -0
- empathy_os/workflows/release_prep.py +54 -0
- empathy_os/workflows/security_audit.py +154 -79
- empathy_os/workflows/test_gen.py +60 -0
- empathy_os/workflows/test_gen_behavioral.py +477 -0
- empathy_os/workflows/test_gen_parallel.py +341 -0
- empathy_framework-5.0.1.dist-info/licenses/LICENSE +0 -139
- {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/WHEEL +0 -0
- {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/entry_points.txt +0 -0
- {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/top_level.txt +0 -0
empathy_os/telemetry/event_streaming.py

@@ -32,10 +32,10 @@ from __future__ import annotations
 
 import json
 import logging
-import
-from dataclasses import dataclass
+from collections.abc import Iterator
+from dataclasses import dataclass
 from datetime import datetime
-from typing import Any
+from typing import Any
 
 logger = logging.getLogger(__name__)
 
@@ -103,14 +103,14 @@ class EventStreamer:
     Publishes events to Redis Streams and provides methods for consuming
     events via polling or blocking reads.
 
-    Stream naming:
+    Stream naming: stream:{event_type}
     Examples:
-    -
-    -
-    -
+    - stream:agent_heartbeat
+    - stream:coordination_signal
+    - stream:workflow_progress
     """
 
-    STREAM_PREFIX = "
+    STREAM_PREFIX = "stream:"
     MAX_STREAM_LENGTH = 10000  # Trim streams to last 10K events
     DEFAULT_BLOCK_MS = 5000  # 5 seconds blocking read timeout
 
@@ -142,7 +142,7 @@ class EventStreamer:
            event_type: Type of event
 
        Returns:
-            Stream key (e.g., "
+            Stream key (e.g., "stream:agent_heartbeat")
        """
        return f"{self.STREAM_PREFIX}{event_type}"
 
@@ -162,7 +162,7 @@ class EventStreamer:
        Returns:
            Event ID (Redis stream entry ID) if successful, empty string otherwise
        """
-        if not self.memory or not hasattr(self.memory, "
+        if not self.memory or not hasattr(self.memory, "_client") or not self.memory._client:
            logger.debug("Cannot publish event: no Redis backend")
            return ""
 
@@ -178,7 +178,7 @@ class EventStreamer:
 
        try:
            # Add to stream with automatic trimming (MAXLEN)
-            event_id = self.memory.
+            event_id = self.memory._client.xadd(
                stream_key,
                entry,
                maxlen=self.MAX_STREAM_LENGTH,
@@ -219,7 +219,7 @@ class EventStreamer:
            >>> for event in streamer.consume_events(event_types=["agent_heartbeat"]):
            ... print(f"Agent {event.data['agent_id']} status: {event.data['status']}")
        """
-        if not self.memory or not hasattr(self.memory, "
+        if not self.memory or not hasattr(self.memory, "_client") or not self.memory._client:
            logger.warning("Cannot consume events: no Redis backend")
            return
 
@@ -230,7 +230,7 @@ class EventStreamer:
            streams = {self._get_stream_key(et): start_id for et in event_types}
        else:
            # Subscribe to all event streams (expensive - requires KEYS scan)
-            all_streams = self.memory.
+            all_streams = self.memory._client.keys(f"{self.STREAM_PREFIX}*")
            streams = {s.decode("utf-8") if isinstance(s, bytes) else s: start_id for s in all_streams}
 
        if not streams:
@@ -243,7 +243,7 @@ class EventStreamer:
        try:
            while True:
                # XREAD: blocking read from multiple streams
-                results = self.memory.
+                results = self.memory._client.xread(
                    last_ids,
                    count=count,
                    block=block_ms,
@@ -294,7 +294,7 @@ class EventStreamer:
        Returns:
            List of recent events (newest first)
        """
-        if not self.memory or not hasattr(self.memory, "
+        if not self.memory or not hasattr(self.memory, "_client") or not self.memory._client:
            logger.debug("Cannot get recent events: no Redis backend")
            return []
 
@@ -302,7 +302,7 @@ class EventStreamer:
 
        try:
            # XREVRANGE: get events in reverse chronological order
-            results = self.memory.
+            results = self.memory._client.xrevrange(
                stream_key,
                max=end_id,
                min=start_id,
@@ -333,13 +333,13 @@ class EventStreamer:
        Returns:
            Dictionary with stream info (length, first_entry, last_entry, etc.)
        """
-        if not self.memory or not hasattr(self.memory, "
+        if not self.memory or not hasattr(self.memory, "_client") or not self.memory._client:
            return {}
 
        stream_key = self._get_stream_key(event_type)
 
        try:
-            info = self.memory.
+            info = self.memory._client.xinfo_stream(stream_key)
 
            # Decode bytes keys/values
            decoded_info = {}
@@ -365,13 +365,13 @@ class EventStreamer:
        Returns:
            True if deleted, False otherwise
        """
-        if not self.memory or not hasattr(self.memory, "
+        if not self.memory or not hasattr(self.memory, "_client") or not self.memory._client:
            return False
 
        stream_key = self._get_stream_key(event_type)
 
        try:
-            result = self.memory.
+            result = self.memory._client.delete(stream_key)
            return result > 0
        except Exception as e:
            logger.error(f"Failed to delete stream {event_type}: {e}")
@@ -387,14 +387,14 @@ class EventStreamer:
        Returns:
            Number of events trimmed
        """
-        if not self.memory or not hasattr(self.memory, "
+        if not self.memory or not hasattr(self.memory, "_client") or not self.memory._client:
            return 0
 
        stream_key = self._get_stream_key(event_type)
 
        try:
            # XTRIM: trim to approximate max length
-            trimmed = self.memory.
+            trimmed = self.memory._client.xtrim(
                stream_key,
                maxlen=max_length,
                approximate=True,