empathy-framework 5.0.1-py3-none-any.whl → 5.1.0-py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (61)
  1. {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/METADATA +311 -150
  2. {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/RECORD +60 -33
  3. empathy_framework-5.1.0.dist-info/licenses/LICENSE +201 -0
  4. empathy_framework-5.1.0.dist-info/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +101 -0
  5. empathy_llm_toolkit/providers.py +175 -35
  6. empathy_llm_toolkit/utils/tokens.py +150 -30
  7. empathy_os/__init__.py +1 -1
  8. empathy_os/cli/commands/batch.py +256 -0
  9. empathy_os/cli/commands/cache.py +248 -0
  10. empathy_os/cli/commands/inspect.py +1 -2
  11. empathy_os/cli/commands/metrics.py +1 -1
  12. empathy_os/cli/commands/routing.py +285 -0
  13. empathy_os/cli/commands/workflow.py +2 -1
  14. empathy_os/cli/parsers/__init__.py +6 -0
  15. empathy_os/cli/parsers/batch.py +118 -0
  16. empathy_os/cli/parsers/cache 2.py +65 -0
  17. empathy_os/cli/parsers/cache.py +65 -0
  18. empathy_os/cli/parsers/routing.py +110 -0
  19. empathy_os/cli_minimal.py +3 -3
  20. empathy_os/cli_router 2.py +416 -0
  21. empathy_os/dashboard/__init__.py +1 -2
  22. empathy_os/dashboard/app 2.py +512 -0
  23. empathy_os/dashboard/app.py +1 -1
  24. empathy_os/dashboard/simple_server 2.py +403 -0
  25. empathy_os/dashboard/standalone_server 2.py +536 -0
  26. empathy_os/dashboard/standalone_server.py +22 -11
  27. empathy_os/memory/types 2.py +441 -0
  28. empathy_os/metrics/collector.py +31 -0
  29. empathy_os/models/__init__.py +19 -0
  30. empathy_os/models/adaptive_routing 2.py +437 -0
  31. empathy_os/models/auth_cli.py +444 -0
  32. empathy_os/models/auth_strategy.py +450 -0
  33. empathy_os/models/token_estimator.py +21 -13
  34. empathy_os/project_index/scanner_parallel 2.py +291 -0
  35. empathy_os/telemetry/agent_coordination 2.py +478 -0
  36. empathy_os/telemetry/agent_coordination.py +14 -16
  37. empathy_os/telemetry/agent_tracking 2.py +350 -0
  38. empathy_os/telemetry/agent_tracking.py +18 -20
  39. empathy_os/telemetry/approval_gates 2.py +563 -0
  40. empathy_os/telemetry/approval_gates.py +27 -39
  41. empathy_os/telemetry/event_streaming 2.py +405 -0
  42. empathy_os/telemetry/event_streaming.py +22 -22
  43. empathy_os/telemetry/feedback_loop 2.py +557 -0
  44. empathy_os/telemetry/feedback_loop.py +14 -17
  45. empathy_os/workflows/__init__.py +8 -0
  46. empathy_os/workflows/autonomous_test_gen.py +569 -0
  47. empathy_os/workflows/batch_processing.py +56 -10
  48. empathy_os/workflows/bug_predict.py +45 -0
  49. empathy_os/workflows/code_review.py +92 -22
  50. empathy_os/workflows/document_gen.py +594 -62
  51. empathy_os/workflows/llm_base.py +363 -0
  52. empathy_os/workflows/perf_audit.py +69 -0
  53. empathy_os/workflows/release_prep.py +54 -0
  54. empathy_os/workflows/security_audit.py +154 -79
  55. empathy_os/workflows/test_gen.py +60 -0
  56. empathy_os/workflows/test_gen_behavioral.py +477 -0
  57. empathy_os/workflows/test_gen_parallel.py +341 -0
  58. empathy_framework-5.0.1.dist-info/licenses/LICENSE +0 -139
  59. {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/WHEEL +0 -0
  60. {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/entry_points.txt +0 -0
  61. {empathy_framework-5.0.1.dist-info → empathy_framework-5.1.0.dist-info}/top_level.txt +0 -0
empathy_os/telemetry/approval_gates.py
@@ -236,19 +236,13 @@ class ApprovalGate:
         # Store approval request (for UI to retrieve)
         request_key = f"approval_request:{request_id}"
         try:
-            if hasattr(self.memory, "stash"):
-                self.memory.stash(
-                    key=request_key,
-                    data=request.to_dict(),
-                    credentials=None,
-                    ttl_seconds=int(timeout) + 60,  # TTL = timeout + buffer
-                )
-            elif hasattr(self.memory, "_redis"):
+            # Use direct Redis access for custom TTL
+            if hasattr(self.memory, "_client") and self.memory._client:
                 import json
 
-                self.memory._redis.setex(request_key, int(timeout) + 60, json.dumps(request.to_dict()))
+                self.memory._client.setex(request_key, int(timeout) + 60, json.dumps(request.to_dict()))
             else:
-                logger.warning("Cannot store approval request: unsupported memory type")
+                logger.warning("Cannot store approval request: no Redis backend available")
         except Exception as e:
             logger.error(f"Failed to store approval request: {e}")
             return ApprovalResponse(
@@ -294,12 +288,11 @@ class ApprovalGate:
             # Update request status to timeout
             request.status = "timeout"
             try:
-                if hasattr(self.memory, "stash"):
-                    self.memory.stash(key=request_key, data=request.to_dict(), credentials=None, ttl_seconds=60)
-                elif hasattr(self.memory, "_redis"):
+                # Use direct Redis access
+                if hasattr(self.memory, "_client") and self.memory._client:
                     import json
 
-                    self.memory._redis.setex(request_key, 60, json.dumps(request.to_dict()))
+                    self.memory._client.setex(request_key, 60, json.dumps(request.to_dict()))
             except Exception:
                 pass
 
@@ -322,10 +315,10 @@ class ApprovalGate:
             if hasattr(self.memory, "retrieve"):
                 data = self.memory.retrieve(response_key, credentials=None)
             # Try direct Redis access
-            elif hasattr(self.memory, "_redis"):
+            elif hasattr(self.memory, "_client"):
                 import json
 
-                raw_data = self.memory._redis.get(response_key)
+                raw_data = self.memory._client.get(response_key)
                 if raw_data:
                     if isinstance(raw_data, bytes):
                         raw_data = raw_data.decode("utf-8")
@@ -376,16 +369,13 @@ class ApprovalGate:
         # Store approval response (for workflow to retrieve)
         response_key = f"approval_response:{request_id}"
         try:
-            if hasattr(self.memory, "stash"):
-                self.memory.stash(
-                    key=response_key, data=response.to_dict(), credentials=None, ttl_seconds=300  # 5 min TTL
-                )
-            elif hasattr(self.memory, "_redis"):
+            # Use direct Redis access
+            if hasattr(self.memory, "_client") and self.memory._client:
                 import json
 
-                self.memory._redis.setex(response_key, 300, json.dumps(response.to_dict()))
+                self.memory._client.setex(response_key, 300, json.dumps(response.to_dict()))
             else:
-                logger.warning("Cannot store approval response: unsupported memory type")
+                logger.warning("Cannot store approval response: no Redis backend available")
                 return False
         except Exception as e:
             logger.error(f"Failed to store approval response: {e}")
@@ -396,10 +386,10 @@ class ApprovalGate:
         try:
             if hasattr(self.memory, "retrieve"):
                 request_data = self.memory.retrieve(request_key, credentials=None)
-            elif hasattr(self.memory, "_redis"):
+            elif hasattr(self.memory, "_client"):
                 import json
 
-                raw_data = self.memory._redis.get(request_key)
+                raw_data = self.memory._client.get(request_key)
                 if raw_data:
                     if isinstance(raw_data, bytes):
                         raw_data = raw_data.decode("utf-8")
@@ -413,12 +403,11 @@ class ApprovalGate:
             request = ApprovalRequest.from_dict(request_data)
             request.status = "approved" if approved else "rejected"
 
-            if hasattr(self.memory, "stash"):
-                self.memory.stash(key=request_key, data=request.to_dict(), credentials=None, ttl_seconds=300)
-            elif hasattr(self.memory, "_redis"):
+            # Use direct Redis access
+            if hasattr(self.memory, "_client") and self.memory._client:
                 import json
 
-                self.memory._redis.setex(request_key, 300, json.dumps(request.to_dict()))
+                self.memory._client.setex(request_key, 300, json.dumps(request.to_dict()))
         except Exception as e:
             logger.debug(f"Failed to update request status: {e}")
 
@@ -457,12 +446,12 @@ class ApprovalGate:
             >>> for request in pending:
             ...     print(f"{request.approval_type}: {request.context}")
         """
-        if not self.memory or not hasattr(self.memory, "_redis"):
+        if not self.memory or not hasattr(self.memory, "_client"):
             return []
 
         try:
             # Scan for approval_request:* keys
-            keys = self.memory._redis.keys("approval_request:*")
+            keys = self.memory._client.keys("approval_request:*")
 
             requests = []
             for key in keys:
@@ -475,7 +464,7 @@ class ApprovalGate:
                 else:
                     import json
 
-                    raw_data = self.memory._redis.get(key)
+                    raw_data = self.memory._client.get(key)
                     if raw_data:
                         if isinstance(raw_data, bytes):
                             raw_data = raw_data.decode("utf-8")
@@ -512,11 +501,11 @@ class ApprovalGate:
         Returns:
             Number of requests cleared
         """
-        if not self.memory or not hasattr(self.memory, "_redis"):
+        if not self.memory or not hasattr(self.memory, "_client"):
            return 0
 
        try:
-            keys = self.memory._redis.keys("approval_request:*")
+            keys = self.memory._client.keys("approval_request:*")
            now = datetime.utcnow()
            cleared = 0
 
@@ -530,7 +519,7 @@ class ApprovalGate:
                 else:
                     import json
 
-                    raw_data = self.memory._redis.get(key)
+                    raw_data = self.memory._client.get(key)
                     if raw_data:
                         if isinstance(raw_data, bytes):
                             raw_data = raw_data.decode("utf-8")
@@ -548,12 +537,11 @@ class ApprovalGate:
                 if elapsed > request.timeout_seconds and request.status == "pending":
                     # Update to timeout status
                     request.status = "timeout"
-                    if hasattr(self.memory, "stash"):
-                        self.memory.stash(key=key, data=request.to_dict(), credentials=None, ttl_seconds=60)
-                    elif hasattr(self.memory, "_redis"):
+                    # Use direct Redis access
+                    if hasattr(self.memory, "_client") and self.memory._client:
                         import json
 
-                        self.memory._redis.setex(key, 60, json.dumps(request.to_dict()))
+                        self.memory._client.setex(key, 60, json.dumps(request.to_dict()))
 
                     cleared += 1
 
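The hunks above replace the stash()/_redis fallback chain with a single direct-Redis path through self.memory._client. A minimal sketch of the contract the new code assumes (the RedisMemory wrapper, key, and payload below are illustrative stand-ins, not the package's actual classes):

    import json
    import redis

    class RedisMemory:
        # Hypothetical backend exposing a raw Redis client at `_client`,
        # which is all the updated ApprovalGate checks for.
        def __init__(self, url: str = "redis://localhost:6379/0"):
            self._client = redis.Redis.from_url(url)

    memory = RedisMemory()
    timeout = 120.0
    request_dict = {"request_id": "req-123", "status": "pending"}  # stand-in payload

    # Mirrors the new storage path: SETEX with TTL = timeout + a 60s buffer
    memory._client.setex(
        "approval_request:req-123",
        int(timeout) + 60,
        json.dumps(request_dict),
    )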
empathy_os/telemetry/event_streaming 2.py
@@ -0,0 +1,405 @@
+"""Real-Time Event Streaming using Redis Streams.
+
+Pattern 4 from Agent Coordination Architecture - Publish agent events
+to Redis Streams for real-time monitoring and WebSocket consumption.
+
+Event types:
+- agent_heartbeat: Agent liveness updates
+- coordination_signal: Inter-agent coordination messages
+- workflow_progress: Workflow stage progress
+- agent_error: Agent failures and errors
+
+Usage:
+    # Publish events
+    streamer = EventStreamer()
+    streamer.publish_event(
+        event_type="agent_heartbeat",
+        data={"agent_id": "worker-1", "status": "running", "progress": 0.5}
+    )
+
+    # Consume events (blocking)
+    for event in streamer.consume_events(event_types=["agent_heartbeat"]):
+        print(f"Received: {event}")
+
+    # Get recent events (non-blocking)
+    recent = streamer.get_recent_events(event_type="agent_heartbeat", count=100)
+
+Copyright 2025 Smart-AI-Memory
+Licensed under Fair Source License 0.9
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+import time
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Any, Iterator
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class StreamEvent:
+    """Event published to Redis Stream."""
+
+    event_id: str  # Redis stream entry ID (e.g., "1706356800000-0")
+    event_type: str  # "agent_heartbeat", "coordination_signal", etc.
+    timestamp: datetime
+    data: dict[str, Any]
+    source: str = "empathy_os"  # Source system
+
+    def to_dict(self) -> dict[str, Any]:
+        """Convert to dictionary for serialization."""
+        return {
+            "event_id": self.event_id,
+            "event_type": self.event_type,
+            "timestamp": self.timestamp.isoformat() if isinstance(self.timestamp, datetime) else self.timestamp,
+            "data": self.data,
+            "source": self.source,
+        }
+
+    @classmethod
+    def from_redis_entry(cls, event_id: str, entry_data: dict[bytes, bytes]) -> StreamEvent:
+        """Create from Redis stream entry.
+
+        Args:
+            event_id: Redis stream entry ID
+            entry_data: Raw entry data from Redis (bytes dict)
+
+        Returns:
+            StreamEvent instance
+        """
+        # Decode bytes to strings
+        decoded = {k.decode("utf-8"): v.decode("utf-8") for k, v in entry_data.items()}
+
+        # Parse timestamp
+        timestamp_str = decoded.get("timestamp", "")
+        try:
+            timestamp = datetime.fromisoformat(timestamp_str)
+        except (ValueError, AttributeError):
+            timestamp = datetime.utcnow()
+
+        # Parse data field (JSON)
+        data_str = decoded.get("data", "{}")
+        try:
+            data = json.loads(data_str)
+        except json.JSONDecodeError:
+            data = {}
+
+        return cls(
+            event_id=event_id,
+            event_type=decoded.get("event_type", "unknown"),
+            timestamp=timestamp,
+            data=data,
+            source=decoded.get("source", "empathy_os"),
+        )
+
+
+class EventStreamer:
+    """Real-time event streaming using Redis Streams.
+
+    Publishes events to Redis Streams and provides methods for consuming
+    events via polling or blocking reads.
+
+    Stream naming: empathy:events:{event_type}
+    Examples:
+    - empathy:events:agent_heartbeat
+    - empathy:events:coordination_signal
+    - empathy:events:workflow_progress
+    """
+
+    STREAM_PREFIX = "empathy:events:"
+    MAX_STREAM_LENGTH = 10000  # Trim streams to last 10K events
+    DEFAULT_BLOCK_MS = 5000  # 5 seconds blocking read timeout
+
+    def __init__(self, memory=None):
+        """Initialize event streamer.
+
+        Args:
+            memory: Memory backend with Redis connection
+        """
+        self.memory = memory
+
+        if self.memory is None:
+            try:
+                from empathy_os.telemetry import UsageTracker
+
+                tracker = UsageTracker.get_instance()
+                if hasattr(tracker, "_memory"):
+                    self.memory = tracker._memory
+            except (ImportError, AttributeError):
+                pass
+
+        if self.memory is None:
+            logger.warning("No memory backend available for event streaming")
+
+    def _get_stream_key(self, event_type: str) -> str:
+        """Get Redis stream key for an event type.
+
+        Args:
+            event_type: Type of event
+
+        Returns:
+            Stream key (e.g., "empathy:events:agent_heartbeat")
+        """
+        return f"{self.STREAM_PREFIX}{event_type}"
+
+    def publish_event(
+        self,
+        event_type: str,
+        data: dict[str, Any],
+        source: str = "empathy_os",
+    ) -> str:
+        """Publish an event to Redis Stream.
+
+        Args:
+            event_type: Type of event (e.g., "agent_heartbeat", "coordination_signal")
+            data: Event payload data
+            source: Source system (default "empathy_os")
+
+        Returns:
+            Event ID (Redis stream entry ID) if successful, empty string otherwise
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            logger.debug("Cannot publish event: no Redis backend")
+            return ""
+
+        stream_key = self._get_stream_key(event_type)
+
+        # Prepare entry data
+        entry = {
+            "event_type": event_type,
+            "timestamp": datetime.utcnow().isoformat(),
+            "data": json.dumps(data),
+            "source": source,
+        }
+
+        try:
+            # Add to stream with automatic trimming (MAXLEN)
+            event_id = self.memory._redis.xadd(
+                stream_key,
+                entry,
+                maxlen=self.MAX_STREAM_LENGTH,
+                approximate=True,  # Use ~ for performance
+            )
+
+            # Decode event_id if bytes
+            if isinstance(event_id, bytes):
+                event_id = event_id.decode("utf-8")
+
+            logger.debug(f"Published event {event_type}: {event_id}")
+            return event_id
+
+        except Exception as e:
+            logger.error(f"Failed to publish event {event_type}: {e}")
+            return ""
+
+    def consume_events(
+        self,
+        event_types: list[str] | None = None,
+        block_ms: int | None = None,
+        count: int = 10,
+        start_id: str = "$",
+    ) -> Iterator[StreamEvent]:
+        """Consume events from Redis Streams (blocking iterator).
+
+        Args:
+            event_types: List of event types to consume (None = all types)
+            block_ms: Blocking timeout in milliseconds (None = DEFAULT_BLOCK_MS)
+            count: Number of events to read per batch
+            start_id: Stream position to start from ("$" = new events only, "0" = all events)
+
+        Yields:
+            StreamEvent instances as they arrive
+
+        Example:
+            >>> streamer = EventStreamer()
+            >>> for event in streamer.consume_events(event_types=["agent_heartbeat"]):
+            ...     print(f"Agent {event.data['agent_id']} status: {event.data['status']}")
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            logger.warning("Cannot consume events: no Redis backend")
+            return
+
+        block_ms = block_ms if block_ms is not None else self.DEFAULT_BLOCK_MS
+
+        # Determine streams to read
+        if event_types:
+            streams = {self._get_stream_key(et): start_id for et in event_types}
+        else:
+            # Subscribe to all event streams (expensive - requires KEYS scan)
+            all_streams = self.memory._redis.keys(f"{self.STREAM_PREFIX}*")
+            streams = {s.decode("utf-8") if isinstance(s, bytes) else s: start_id for s in all_streams}
+
+        if not streams:
+            logger.debug("No streams to consume")
+            return
+
+        # Track last IDs for each stream
+        last_ids = streams.copy()
+
+        try:
+            while True:
+                # XREAD: blocking read from multiple streams
+                results = self.memory._redis.xread(
+                    last_ids,
+                    count=count,
+                    block=block_ms,
+                )
+
+                if not results:
+                    # Timeout - no new events
+                    continue
+
+                # Process results
+                for stream_key, entries in results:
+                    # Decode stream key if bytes
+                    if isinstance(stream_key, bytes):
+                        stream_key = stream_key.decode("utf-8")
+
+                    for event_id, entry_data in entries:
+                        # Decode event_id if bytes
+                        if isinstance(event_id, bytes):
+                            event_id = event_id.decode("utf-8")
+
+                        # Parse event
+                        event = StreamEvent.from_redis_entry(event_id, entry_data)
+                        yield event
+
+                        # Update last_id for this stream
+                        last_ids[stream_key] = event_id
+
+        except KeyboardInterrupt:
+            logger.info("Event consumption interrupted")
+        except Exception as e:
+            logger.error(f"Error consuming events: {e}")
+
+    def get_recent_events(
+        self,
+        event_type: str,
+        count: int = 100,
+        start_id: str = "-",
+        end_id: str = "+",
+    ) -> list[StreamEvent]:
+        """Get recent events from a stream (non-blocking).
+
+        Args:
+            event_type: Type of event to retrieve
+            count: Maximum number of events to return
+            start_id: Start position ("-" = oldest, specific ID = from that point)
+            end_id: End position ("+" = newest, specific ID = up to that point)
+
+        Returns:
+            List of recent events (newest first)
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            logger.debug("Cannot get recent events: no Redis backend")
+            return []
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            # XREVRANGE: get events in reverse chronological order
+            results = self.memory._redis.xrevrange(
+                stream_key,
+                max=end_id,
+                min=start_id,
+                count=count,
+            )
+
+            events = []
+            for event_id, entry_data in results:
+                # Decode event_id if bytes
+                if isinstance(event_id, bytes):
+                    event_id = event_id.decode("utf-8")
+
+                event = StreamEvent.from_redis_entry(event_id, entry_data)
+                events.append(event)
+
+            return events
+
+        except Exception as e:
+            logger.error(f"Failed to get recent events for {event_type}: {e}")
+            return []
+
+    def get_stream_info(self, event_type: str) -> dict[str, Any]:
+        """Get information about a stream.
+
+        Args:
+            event_type: Type of event stream
+
+        Returns:
+            Dictionary with stream info (length, first_entry, last_entry, etc.)
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            return {}
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            info = self.memory._redis.xinfo_stream(stream_key)
+
+            # Decode bytes keys/values
+            decoded_info = {}
+            for key, value in info.items():
+                if isinstance(key, bytes):
+                    key = key.decode("utf-8")
+                if isinstance(value, bytes):
+                    value = value.decode("utf-8")
+                decoded_info[key] = value
+
+            return decoded_info
+
+        except Exception as e:
+            logger.debug(f"Failed to get stream info for {event_type}: {e}")
+            return {}
+
+    def delete_stream(self, event_type: str) -> bool:
+        """Delete an event stream.
+
+        Args:
+            event_type: Type of event stream to delete
+
+        Returns:
+            True if deleted, False otherwise
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            return False
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            result = self.memory._redis.delete(stream_key)
+            return result > 0
+        except Exception as e:
+            logger.error(f"Failed to delete stream {event_type}: {e}")
+            return False
+
+    def trim_stream(self, event_type: str, max_length: int = 1000) -> int:
+        """Trim a stream to a maximum length.
+
+        Args:
+            event_type: Type of event stream
+            max_length: Maximum number of events to keep
+
+        Returns:
+            Number of events trimmed
+        """
+        if not self.memory or not hasattr(self.memory, "_redis"):
+            return 0
+
+        stream_key = self._get_stream_key(event_type)
+
+        try:
+            # XTRIM: trim to approximate max length
+            trimmed = self.memory._redis.xtrim(
+                stream_key,
+                maxlen=max_length,
+                approximate=True,
+            )
+            return trimmed
+        except Exception as e:
+            logger.error(f"Failed to trim stream {event_type}: {e}")
+            return 0
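Note that this new module still reaches the raw client via memory._redis, while the ApprovalGate changes above moved to memory._client, so a backend must expose whichever attribute the consumer checks. A minimal usage sketch, assuming a redis-py connection and that the module is importable as empathy_os.telemetry.event_streaming (the StreamMemory wrapper and URL below are illustrative, not part of the package):

    import redis

    from empathy_os.telemetry.event_streaming import EventStreamer

    class StreamMemory:
        # Hypothetical backend exposing the raw client at `_redis`,
        # as EventStreamer expects.
        def __init__(self, url: str = "redis://localhost:6379/0"):
            self._redis = redis.Redis.from_url(url)

    streamer = EventStreamer(memory=StreamMemory())

    # Publish a progress event; returns the Redis stream entry ID
    streamer.publish_event(
        event_type="workflow_progress",
        data={"workflow": "code_review", "stage": "lint", "progress": 0.25},
    )

    # Non-blocking read of the most recent events (newest first)
    for event in streamer.get_recent_events("workflow_progress", count=10):
        print(event.event_type, event.timestamp, event.data)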