empathy-framework 5.1.1-py3-none-any.whl → 5.3.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/METADATA +79 -6
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/RECORD +83 -64
- empathy_os/__init__.py +1 -1
- empathy_os/cache/hybrid.py +5 -1
- empathy_os/cli/commands/batch.py +8 -0
- empathy_os/cli/commands/profiling.py +4 -0
- empathy_os/cli/commands/workflow.py +8 -4
- empathy_os/cli_router.py +9 -0
- empathy_os/config.py +15 -2
- empathy_os/core_modules/__init__.py +15 -0
- empathy_os/dashboard/simple_server.py +62 -30
- empathy_os/mcp/__init__.py +10 -0
- empathy_os/mcp/server.py +506 -0
- empathy_os/memory/control_panel.py +1 -131
- empathy_os/memory/control_panel_support.py +145 -0
- empathy_os/memory/encryption.py +159 -0
- empathy_os/memory/long_term.py +46 -631
- empathy_os/memory/long_term_types.py +99 -0
- empathy_os/memory/mixins/__init__.py +25 -0
- empathy_os/memory/mixins/backend_init_mixin.py +249 -0
- empathy_os/memory/mixins/capabilities_mixin.py +208 -0
- empathy_os/memory/mixins/handoff_mixin.py +208 -0
- empathy_os/memory/mixins/lifecycle_mixin.py +49 -0
- empathy_os/memory/mixins/long_term_mixin.py +352 -0
- empathy_os/memory/mixins/promotion_mixin.py +109 -0
- empathy_os/memory/mixins/short_term_mixin.py +182 -0
- empathy_os/memory/short_term.py +61 -12
- empathy_os/memory/simple_storage.py +302 -0
- empathy_os/memory/storage_backend.py +167 -0
- empathy_os/memory/types.py +8 -3
- empathy_os/memory/unified.py +21 -1120
- empathy_os/meta_workflows/cli_commands/__init__.py +56 -0
- empathy_os/meta_workflows/cli_commands/agent_commands.py +321 -0
- empathy_os/meta_workflows/cli_commands/analytics_commands.py +442 -0
- empathy_os/meta_workflows/cli_commands/config_commands.py +232 -0
- empathy_os/meta_workflows/cli_commands/memory_commands.py +182 -0
- empathy_os/meta_workflows/cli_commands/template_commands.py +354 -0
- empathy_os/meta_workflows/cli_commands/workflow_commands.py +382 -0
- empathy_os/meta_workflows/cli_meta_workflows.py +52 -1802
- empathy_os/models/telemetry/__init__.py +71 -0
- empathy_os/models/telemetry/analytics.py +594 -0
- empathy_os/models/telemetry/backend.py +196 -0
- empathy_os/models/telemetry/data_models.py +431 -0
- empathy_os/models/telemetry/storage.py +489 -0
- empathy_os/orchestration/__init__.py +35 -0
- empathy_os/orchestration/execution_strategies.py +481 -0
- empathy_os/orchestration/meta_orchestrator.py +488 -1
- empathy_os/routing/workflow_registry.py +36 -0
- empathy_os/telemetry/agent_coordination.py +2 -3
- empathy_os/telemetry/agent_tracking.py +26 -7
- empathy_os/telemetry/approval_gates.py +18 -24
- empathy_os/telemetry/cli.py +19 -724
- empathy_os/telemetry/commands/__init__.py +14 -0
- empathy_os/telemetry/commands/dashboard_commands.py +696 -0
- empathy_os/telemetry/event_streaming.py +7 -3
- empathy_os/telemetry/feedback_loop.py +28 -15
- empathy_os/tools.py +183 -0
- empathy_os/workflows/__init__.py +5 -0
- empathy_os/workflows/autonomous_test_gen.py +860 -161
- empathy_os/workflows/base.py +6 -2
- empathy_os/workflows/code_review.py +4 -1
- empathy_os/workflows/document_gen/__init__.py +25 -0
- empathy_os/workflows/document_gen/config.py +30 -0
- empathy_os/workflows/document_gen/report_formatter.py +162 -0
- empathy_os/workflows/{document_gen.py → document_gen/workflow.py} +5 -184
- empathy_os/workflows/output.py +4 -1
- empathy_os/workflows/progress.py +8 -2
- empathy_os/workflows/security_audit.py +2 -2
- empathy_os/workflows/security_audit_phase3.py +7 -4
- empathy_os/workflows/seo_optimization.py +633 -0
- empathy_os/workflows/test_gen/__init__.py +52 -0
- empathy_os/workflows/test_gen/ast_analyzer.py +249 -0
- empathy_os/workflows/test_gen/config.py +88 -0
- empathy_os/workflows/test_gen/data_models.py +38 -0
- empathy_os/workflows/test_gen/report_formatter.py +289 -0
- empathy_os/workflows/test_gen/test_templates.py +381 -0
- empathy_os/workflows/test_gen/workflow.py +655 -0
- empathy_os/workflows/test_gen.py +42 -1905
- empathy_os/cli/parsers/cache 2.py +0 -65
- empathy_os/cli_router 2.py +0 -416
- empathy_os/dashboard/app 2.py +0 -512
- empathy_os/dashboard/simple_server 2.py +0 -403
- empathy_os/dashboard/standalone_server 2.py +0 -536
- empathy_os/memory/types 2.py +0 -441
- empathy_os/models/adaptive_routing 2.py +0 -437
- empathy_os/models/telemetry.py +0 -1660
- empathy_os/project_index/scanner_parallel 2.py +0 -291
- empathy_os/telemetry/agent_coordination 2.py +0 -478
- empathy_os/telemetry/agent_tracking 2.py +0 -350
- empathy_os/telemetry/approval_gates 2.py +0 -563
- empathy_os/telemetry/event_streaming 2.py +0 -405
- empathy_os/telemetry/feedback_loop 2.py +0 -557
- empathy_os/vscode_bridge 2.py +0 -173
- empathy_os/workflows/progressive/__init__ 2.py +0 -92
- empathy_os/workflows/progressive/cli 2.py +0 -242
- empathy_os/workflows/progressive/core 2.py +0 -488
- empathy_os/workflows/progressive/orchestrator 2.py +0 -701
- empathy_os/workflows/progressive/reports 2.py +0 -528
- empathy_os/workflows/progressive/telemetry 2.py +0 -280
- empathy_os/workflows/progressive/test_gen 2.py +0 -514
- empathy_os/workflows/progressive/workflow 2.py +0 -628
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/WHEEL +0 -0
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/entry_points.txt +0 -0
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/licenses/LICENSE +0 -0
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +0 -0
- {empathy_framework-5.1.1.dist-info → empathy_framework-5.3.0.dist-info}/top_level.txt +0 -0
empathy_os/memory/mixins/promotion_mixin.py
ADDED

@@ -0,0 +1,109 @@
+"""Pattern promotion mixin for UnifiedMemory.
+
+Handles promotion of patterns from short-term to long-term memory.
+
+Copyright 2025 Smart AI Memory, LLC
+Licensed under Fair Source 0.9
+"""
+
+from typing import TYPE_CHECKING, Any
+
+import structlog
+
+if TYPE_CHECKING:
+    from ..long_term import Classification
+    from ..short_term import AgentCredentials, RedisShortTermMemory
+
+logger = structlog.get_logger(__name__)
+
+
+class PatternPromotionMixin:
+    """Mixin providing pattern promotion capabilities for UnifiedMemory."""
+
+    # Type hints for attributes that will be provided by UnifiedMemory
+    _short_term: "RedisShortTermMemory | None"
+    _long_term: Any  # SecureMemDocsIntegration | None
+
+    # Needs access to methods from other mixins
+    @property
+    def credentials(self) -> "AgentCredentials":
+        """Get credentials - provided by ShortTermOperationsMixin."""
+        ...
+
+    def get_staged_patterns(self) -> list[dict]:
+        """Get staged patterns - provided by ShortTermOperationsMixin."""
+        ...
+
+    def persist_pattern(
+        self,
+        content: str,
+        pattern_type: str,
+        classification: "Classification | str | None" = None,
+        auto_classify: bool = True,
+        metadata: dict[str, Any] | None = None,
+    ) -> dict[str, Any] | None:
+        """Persist pattern - provided by LongTermOperationsMixin."""
+        ...
+
+    # =========================================================================
+    # PATTERN PROMOTION (SHORT-TERM → LONG-TERM)
+    # =========================================================================
+
+    def promote_pattern(
+        self,
+        staged_pattern_id: str,
+        classification: "Classification | str | None" = None,
+        auto_classify: bool = True,
+    ) -> dict[str, Any] | None:
+        """Promote a staged pattern from short-term to long-term memory.
+
+        Args:
+            staged_pattern_id: ID of staged pattern to promote
+            classification: Override classification (or auto-detect)
+            auto_classify: Auto-detect classification from content
+
+        Returns:
+            Long-term storage result, or None if failed
+
+        """
+        if not self._short_term or not self._long_term:
+            logger.error("memory_backends_unavailable")
+            return None
+
+        # Retrieve staged pattern
+        staged_patterns = self.get_staged_patterns()
+        staged = next(
+            (p for p in staged_patterns if p.get("pattern_id") == staged_pattern_id),
+            None,
+        )
+
+        if not staged:
+            logger.warning("staged_pattern_not_found", pattern_id=staged_pattern_id)
+            return None
+
+        # Persist to long-term storage
+        # Content is stored in context dict by stage_pattern
+        context = staged.get("context", {})
+        content = context.get("content", "") or staged.get("description", "")
+        result = self.persist_pattern(
+            content=content,
+            pattern_type=staged.get("pattern_type", "general"),
+            classification=classification,
+            auto_classify=auto_classify,
+            metadata=context,
+        )
+
+        if result:
+            # Remove from staging (use promote_pattern which handles deletion)
+            try:
+                self._short_term.promote_pattern(staged_pattern_id, self.credentials)
+            except PermissionError:
+                # If we can't promote (delete from staging), just log it
+                logger.warning("could_not_remove_from_staging", pattern_id=staged_pattern_id)
+            logger.info(
+                "pattern_promoted",
+                staged_id=staged_pattern_id,
+                long_term_id=result.get("pattern_id"),
+            )
+
+        return result
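For orientation, here is a minimal usage sketch of the promotion flow this mixin adds. It assumes a composed `UnifiedMemory`-style object named `memory` that mixes in `PatternPromotionMixin` with its short-term and long-term backends already wired up, and a `staged_id` previously returned by `stage_pattern()` (defined in the short-term mixin below); the constructor and wiring are not shown in this diff.

```python
# Hypothetical usage sketch (not from the package): `memory` composes the
# new mixins; `staged_id` came from an earlier stage_pattern() call.
result = memory.promote_pattern(staged_id, auto_classify=True)
if result:
    print("promoted to long-term id:", result.get("pattern_id"))
else:
    print("promotion failed: backends unavailable or pattern not found")
```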
empathy_os/memory/mixins/short_term_mixin.py
ADDED

@@ -0,0 +1,182 @@
+"""Short-term memory operations mixin for UnifiedMemory.
+
+Provides working memory operations (stash/retrieve) and pattern staging.
+
+Copyright 2025 Smart AI Memory, LLC
+Licensed under Fair Source 0.9
+"""
+
+import uuid
+from datetime import datetime
+from typing import TYPE_CHECKING, Any
+
+import structlog
+
+if TYPE_CHECKING:
+    from ..file_session import FileSessionMemory
+    from ..redis_bootstrap import RedisStatus
+    from ..short_term import (
+        AccessTier,
+        AgentCredentials,
+        RedisShortTermMemory,
+    )
+
+logger = structlog.get_logger(__name__)
+
+
+class ShortTermOperationsMixin:
+    """Mixin providing short-term memory operations for UnifiedMemory."""
+
+    # Type hints for attributes that will be provided by UnifiedMemory
+    user_id: str
+    access_tier: "AccessTier"
+    _file_session: "FileSessionMemory | None"
+    _short_term: "RedisShortTermMemory | None"
+    _redis_status: "RedisStatus | None"
+    config: Any  # MemoryConfig
+
+    @property
+    def credentials(self) -> "AgentCredentials":
+        """Get agent credentials for short-term memory operations."""
+        from ..short_term import AgentCredentials
+
+        return AgentCredentials(agent_id=self.user_id, tier=self.access_tier)
+
+    # =========================================================================
+    # SHORT-TERM MEMORY OPERATIONS (Working Memory)
+    # =========================================================================
+
+    def stash(self, key: str, value: Any, ttl_seconds: int | None = None) -> bool:
+        """Store data in working memory with TTL.
+
+        Uses file-based session as primary storage, with optional Redis for
+        real-time features. Data is persisted to disk automatically.
+
+        Args:
+            key: Storage key
+            value: Data to store (must be JSON-serializable)
+            ttl_seconds: Time-to-live in seconds (default from config)
+
+        Returns:
+            True if stored successfully
+
+        """
+        from ..short_term import TTLStrategy
+
+        ttl = ttl_seconds or self.config.default_ttl_seconds
+
+        # Primary: File session memory (always available)
+        if self._file_session:
+            self._file_session.stash(key, value, ttl=ttl)
+
+        # Optional: Redis for real-time sync
+        if self._short_term and self._redis_status and self._redis_status.available:
+            # Map ttl_seconds to TTLStrategy
+            ttl_strategy = TTLStrategy.WORKING_RESULTS
+            if ttl_seconds is not None:
+                # COORDINATION removed in v5.0 - use SESSION for short-lived data
+                if ttl_seconds <= TTLStrategy.SESSION.value:
+                    ttl_strategy = TTLStrategy.SESSION
+                elif ttl_seconds <= TTLStrategy.WORKING_RESULTS.value:
+                    ttl_strategy = TTLStrategy.WORKING_RESULTS
+                elif ttl_seconds <= TTLStrategy.STAGED_PATTERNS.value:
+                    ttl_strategy = TTLStrategy.STAGED_PATTERNS
+                else:
+                    ttl_strategy = TTLStrategy.CONFLICT_CONTEXT
+
+            try:
+                self._short_term.stash(key, value, self.credentials, ttl_strategy)
+            except Exception as e:
+                logger.debug("redis_stash_failed", key=key, error=str(e))
+
+        # Return True if at least one backend succeeded
+        return self._file_session is not None
+
+    def retrieve(self, key: str) -> Any | None:
+        """Retrieve data from working memory.
+
+        Checks Redis first (if available) for faster access, then falls back
+        to file-based session storage.
+
+        Args:
+            key: Storage key
+
+        Returns:
+            Stored data or None if not found
+
+        """
+        # Try Redis first (faster, if available)
+        if self._short_term and self._redis_status and self._redis_status.available:
+            try:
+                result = self._short_term.retrieve(key, self.credentials)
+                if result is not None:
+                    return result
+            except Exception as e:
+                logger.debug("redis_retrieve_failed", key=key, error=str(e))
+
+        # Fall back to file session (primary storage)
+        if self._file_session:
+            return self._file_session.retrieve(key)
+
+        return None
+
+    # =========================================================================
+    # PATTERN STAGING
+    # =========================================================================
+
+    def stage_pattern(
+        self,
+        pattern_data: dict[str, Any],
+        pattern_type: str = "general",
+        ttl_hours: int = 24,
+    ) -> str | None:
+        """Stage a pattern for validation before long-term storage.
+
+        Args:
+            pattern_data: Pattern content and metadata
+            pattern_type: Type of pattern (algorithm, protocol, etc.)
+            ttl_hours: Hours before staged pattern expires (not used in current impl)
+
+        Returns:
+            Staged pattern ID or None if failed
+
+        """
+        from ..short_term import StagedPattern
+
+        if not self._short_term:
+            logger.warning("short_term_memory_unavailable")
+            return None
+
+        # Create a StagedPattern object from the pattern_data dict
+        pattern_id = f"staged_{uuid.uuid4().hex[:12]}"
+        staged_pattern = StagedPattern(
+            pattern_id=pattern_id,
+            agent_id=self.user_id,
+            pattern_type=pattern_type,
+            name=pattern_data.get("name", f"Pattern {pattern_id[:8]}"),
+            description=pattern_data.get("description", ""),
+            code=pattern_data.get("code"),
+            context=pattern_data.get("context", {}),
+            confidence=pattern_data.get("confidence", 0.5),
+            staged_at=datetime.now(),
+            interests=pattern_data.get("interests", []),
+        )
+        # Store content in context if provided
+        if "content" in pattern_data:
+            staged_pattern.context["content"] = pattern_data["content"]
+
+        success = self._short_term.stage_pattern(staged_pattern, self.credentials)
+        return pattern_id if success else None
+
+    def get_staged_patterns(self) -> list[dict]:
+        """Get all staged patterns awaiting validation.
+
+        Returns:
+            List of staged patterns with metadata
+
+        """
+        if not self._short_term:
+            return []
+
+        staged_list = self._short_term.list_staged_patterns(self.credentials)
+        return [p.to_dict() for p in staged_list]
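A minimal sketch of the working-memory API this mixin exposes, again assuming the same composed `memory` object as above (wiring not shown in this diff). The stash call writes to the file-based session store and, when Redis is enabled, mirrors the value under a `TTLStrategy` derived from `ttl_seconds`; retrieve prefers Redis and falls back to the file session.

```python
# Hypothetical usage sketch: `memory` composes ShortTermOperationsMixin.
memory.stash("current_task", {"step": 3, "status": "running"}, ttl_seconds=900)

# Prefers Redis when available, then falls back to the file session.
task = memory.retrieve("current_task")

# Stage a candidate pattern for later validation and promotion.
staged_id = memory.stage_pattern(
    pattern_data={
        "name": "retry-with-backoff",
        "description": "Retry transient Redis failures with exponential backoff",
        "content": "Wrap calls in a retry loop with jittered sleeps",
        "confidence": 0.8,
    },
    pattern_type="protocol",
)
```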
empathy_os/memory/short_term.py
CHANGED
@@ -23,6 +23,7 @@ Licensed under Fair Source 0.9
 """
 
 import json
+import os
 import threading
 import time
 from collections.abc import Callable
@@ -138,11 +139,25 @@ class RedisShortTermMemory:
         if config is not None:
             self._config = config
         else:
+            # Check environment variable for Redis enablement (default: disabled)
+            redis_enabled = os.getenv("REDIS_ENABLED", "false").lower() in ("true", "1", "yes")
+
+            # Use environment variables for configuration if available
+            env_host = os.getenv("REDIS_HOST", host)
+            env_port = int(os.getenv("REDIS_PORT", str(port)))
+            env_db = int(os.getenv("REDIS_DB", str(db)))
+            env_password = os.getenv("REDIS_PASSWORD", password)
+
+            # If Redis is not enabled via env var, force mock mode
+            if not redis_enabled and not use_mock:
+                use_mock = True
+                logger.info("redis_disabled_via_env", message="Redis not enabled in environment, using mock mode")
+
             self._config = RedisConfig(
-                host=host,
-                port=port,
-                db=db,
-                password=password,
+                host=env_host,
+                port=env_port,
+                db=env_db,
+                password=env_password if env_password else None,
                 use_mock=use_mock,
             )
 
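A brief sketch of how the new environment-driven configuration behaves. The variable names come from the hunk above; the no-argument constructor follows the docstring examples added later in this file, and the hostname/port values are placeholders.

```python
import os

# Opt in to a real Redis backend. Without REDIS_ENABLED the constructor
# now forces mock mode even if connection settings are present.
os.environ["REDIS_ENABLED"] = "true"
os.environ["REDIS_HOST"] = "redis.internal"   # placeholder; overrides the host argument
os.environ["REDIS_PORT"] = "6380"
os.environ["REDIS_DB"] = "1"
# REDIS_PASSWORD is optional; unset or empty means no password is sent.

from empathy_os.memory.short_term import RedisShortTermMemory

memory = RedisShortTermMemory()  # env vars apply only when no explicit config is passed
```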
@@ -193,6 +208,33 @@ class RedisShortTermMemory:
         else:
             self._client = self._create_client_with_retry()
 
+    @property
+    def client(self) -> Any:
+        """Get the Redis client instance.
+
+        Returns:
+            Redis client instance or None if using mock mode
+
+        Example:
+            >>> memory = RedisShortTermMemory()
+            >>> if memory.client:
+            ...     print("Redis connected")
+        """
+        return self._client
+
+    @property
+    def metrics(self) -> "RedisMetrics":
+        """Get Redis metrics instance.
+
+        Returns:
+            RedisMetrics instance with connection and operation statistics
+
+        Example:
+            >>> memory = RedisShortTermMemory()
+            >>> print(f"Retries: {memory.metrics.retries_total}")
+        """
+        return self._metrics
+
     def _create_client_with_retry(self) -> Any:
         """Create Redis client with retry logic."""
         max_attempts = self._config.retry_max_attempts
@@ -560,7 +602,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not key or not key.strip():
-            raise ValueError("key cannot be empty")
+            raise ValueError(f"key cannot be empty. Got: {key!r}")
 
         if not credentials.can_stage():
             raise PermissionError(
@@ -612,7 +654,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not key or not key.strip():
-            raise ValueError("key cannot be empty")
+            raise ValueError(f"key cannot be empty. Got: {key!r}")
 
         owner = agent_id or credentials.agent_id
         full_key = f"{self.PREFIX_WORKING}{owner}:{key}"
@@ -703,7 +745,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not pattern_id or not pattern_id.strip():
-            raise ValueError("pattern_id cannot be empty")
+            raise ValueError(f"pattern_id cannot be empty. Got: {pattern_id!r}")
 
         key = f"{self.PREFIX_STAGED}{pattern_id}"
         raw = self._get(key)
@@ -824,7 +866,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not conflict_id or not conflict_id.strip():
-            raise ValueError("conflict_id cannot be empty")
+            raise ValueError(f"conflict_id cannot be empty. Got: {conflict_id!r}")
 
         # Pattern 5: Type validation
         if not isinstance(positions, dict):
@@ -874,7 +916,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not conflict_id or not conflict_id.strip():
-            raise ValueError("conflict_id cannot be empty")
+            raise ValueError(f"conflict_id cannot be empty. Got: {conflict_id!r}")
 
         key = f"{self.PREFIX_CONFLICT}{conflict_id}"
         raw = self._get(key)
@@ -949,7 +991,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not session_id or not session_id.strip():
-            raise ValueError("session_id cannot be empty")
+            raise ValueError(f"session_id cannot be empty. Got: {session_id!r}")
 
         # Pattern 5: Type validation
         if metadata is not None and not isinstance(metadata, dict):
@@ -985,7 +1027,7 @@ class RedisShortTermMemory:
         """
        # Pattern 1: String ID validation
         if not session_id or not session_id.strip():
-            raise ValueError("session_id cannot be empty")
+            raise ValueError(f"session_id cannot be empty. Got: {session_id!r}")
 
         key = f"{self.PREFIX_SESSION}{session_id}"
         raw = self._get(key)
@@ -2009,7 +2051,7 @@ class RedisShortTermMemory:
         """
         # Pattern 1: String ID validation
         if not pattern_id or not pattern_id.strip():
-            raise ValueError("pattern_id cannot be empty")
+            raise ValueError(f"pattern_id cannot be empty. Got: {pattern_id!r}")
 
         # Pattern 4: Range validation
         if not 0.0 <= min_confidence <= 1.0:
@@ -2034,6 +2076,9 @@ class RedisShortTermMemory:
                     f"Confidence {pattern.confidence} below threshold {min_confidence}",
                 )
             del self._mock_storage[key]
+            # Also invalidate local cache
+            if key in self._local_cache:
+                del self._local_cache[key]
             return True, pattern, "Pattern promoted successfully"
 
         if self._client is None:
@@ -2063,6 +2108,10 @@ class RedisShortTermMemory:
                 pipe.delete(key)
                 pipe.execute()
 
+                # Also invalidate local cache
+                if key in self._local_cache:
+                    del self._local_cache[key]
+
             return True, pattern, "Pattern promoted successfully"
 
         except redis.WatchError: