empathy-framework 5.1.0-py3-none-any.whl → 5.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/METADATA +52 -3
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/RECORD +71 -30
- empathy_os/__init__.py +1 -1
- empathy_os/cli_router.py +21 -0
- empathy_os/core_modules/__init__.py +15 -0
- empathy_os/mcp/__init__.py +10 -0
- empathy_os/mcp/server.py +506 -0
- empathy_os/memory/control_panel.py +1 -131
- empathy_os/memory/control_panel_support.py +145 -0
- empathy_os/memory/encryption.py +159 -0
- empathy_os/memory/long_term.py +41 -626
- empathy_os/memory/long_term_types.py +99 -0
- empathy_os/memory/mixins/__init__.py +25 -0
- empathy_os/memory/mixins/backend_init_mixin.py +244 -0
- empathy_os/memory/mixins/capabilities_mixin.py +199 -0
- empathy_os/memory/mixins/handoff_mixin.py +208 -0
- empathy_os/memory/mixins/lifecycle_mixin.py +49 -0
- empathy_os/memory/mixins/long_term_mixin.py +352 -0
- empathy_os/memory/mixins/promotion_mixin.py +109 -0
- empathy_os/memory/mixins/short_term_mixin.py +182 -0
- empathy_os/memory/short_term.py +7 -0
- empathy_os/memory/simple_storage.py +302 -0
- empathy_os/memory/storage_backend.py +167 -0
- empathy_os/memory/unified.py +21 -1120
- empathy_os/meta_workflows/cli_commands/__init__.py +56 -0
- empathy_os/meta_workflows/cli_commands/agent_commands.py +321 -0
- empathy_os/meta_workflows/cli_commands/analytics_commands.py +442 -0
- empathy_os/meta_workflows/cli_commands/config_commands.py +232 -0
- empathy_os/meta_workflows/cli_commands/memory_commands.py +182 -0
- empathy_os/meta_workflows/cli_commands/template_commands.py +354 -0
- empathy_os/meta_workflows/cli_commands/workflow_commands.py +382 -0
- empathy_os/meta_workflows/cli_meta_workflows.py +52 -1802
- empathy_os/meta_workflows/intent_detector.py +71 -0
- empathy_os/models/telemetry/__init__.py +71 -0
- empathy_os/models/telemetry/analytics.py +594 -0
- empathy_os/models/telemetry/backend.py +196 -0
- empathy_os/models/telemetry/data_models.py +431 -0
- empathy_os/models/telemetry/storage.py +489 -0
- empathy_os/orchestration/__init__.py +35 -0
- empathy_os/orchestration/execution_strategies.py +481 -0
- empathy_os/orchestration/meta_orchestrator.py +488 -1
- empathy_os/routing/workflow_registry.py +36 -0
- empathy_os/telemetry/cli.py +19 -724
- empathy_os/telemetry/commands/__init__.py +14 -0
- empathy_os/telemetry/commands/dashboard_commands.py +696 -0
- empathy_os/tools.py +183 -0
- empathy_os/workflows/__init__.py +5 -0
- empathy_os/workflows/autonomous_test_gen.py +860 -161
- empathy_os/workflows/base.py +6 -2
- empathy_os/workflows/code_review.py +4 -1
- empathy_os/workflows/document_gen/__init__.py +25 -0
- empathy_os/workflows/document_gen/config.py +30 -0
- empathy_os/workflows/document_gen/report_formatter.py +162 -0
- empathy_os/workflows/document_gen/workflow.py +1426 -0
- empathy_os/workflows/document_gen.py +22 -1598
- empathy_os/workflows/security_audit.py +2 -2
- empathy_os/workflows/security_audit_phase3.py +7 -4
- empathy_os/workflows/seo_optimization.py +633 -0
- empathy_os/workflows/test_gen/__init__.py +52 -0
- empathy_os/workflows/test_gen/ast_analyzer.py +249 -0
- empathy_os/workflows/test_gen/config.py +88 -0
- empathy_os/workflows/test_gen/data_models.py +38 -0
- empathy_os/workflows/test_gen/report_formatter.py +289 -0
- empathy_os/workflows/test_gen/test_templates.py +381 -0
- empathy_os/workflows/test_gen/workflow.py +655 -0
- empathy_os/workflows/test_gen.py +42 -1905
- empathy_os/memory/types 2.py +0 -441
- empathy_os/models/telemetry.py +0 -1660
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/WHEEL +0 -0
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/entry_points.txt +0 -0
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/licenses/LICENSE +0 -0
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/licenses/LICENSE_CHANGE_ANNOUNCEMENT.md +0 -0
- {empathy_framework-5.1.0.dist-info → empathy_framework-5.2.1.dist-info}/top_level.txt +0 -0
empathy_os/memory/mixins/promotion_mixin.py
ADDED

@@ -0,0 +1,109 @@
+"""Pattern promotion mixin for UnifiedMemory.
+
+Handles promotion of patterns from short-term to long-term memory.
+
+Copyright 2025 Smart AI Memory, LLC
+Licensed under Fair Source 0.9
+"""
+
+from typing import TYPE_CHECKING, Any
+
+import structlog
+
+if TYPE_CHECKING:
+    from ..long_term import Classification
+    from ..short_term import AgentCredentials, RedisShortTermMemory
+
+logger = structlog.get_logger(__name__)
+
+
+class PatternPromotionMixin:
+    """Mixin providing pattern promotion capabilities for UnifiedMemory."""
+
+    # Type hints for attributes that will be provided by UnifiedMemory
+    _short_term: "RedisShortTermMemory | None"
+    _long_term: Any  # SecureMemDocsIntegration | None
+
+    # Needs access to methods from other mixins
+    @property
+    def credentials(self) -> "AgentCredentials":
+        """Get credentials - provided by ShortTermOperationsMixin."""
+        ...
+
+    def get_staged_patterns(self) -> list[dict]:
+        """Get staged patterns - provided by ShortTermOperationsMixin."""
+        ...
+
+    def persist_pattern(
+        self,
+        content: str,
+        pattern_type: str,
+        classification: "Classification | str | None" = None,
+        auto_classify: bool = True,
+        metadata: dict[str, Any] | None = None,
+    ) -> dict[str, Any] | None:
+        """Persist pattern - provided by LongTermOperationsMixin."""
+        ...
+
+    # =========================================================================
+    # PATTERN PROMOTION (SHORT-TERM → LONG-TERM)
+    # =========================================================================
+
+    def promote_pattern(
+        self,
+        staged_pattern_id: str,
+        classification: "Classification | str | None" = None,
+        auto_classify: bool = True,
+    ) -> dict[str, Any] | None:
+        """Promote a staged pattern from short-term to long-term memory.
+
+        Args:
+            staged_pattern_id: ID of staged pattern to promote
+            classification: Override classification (or auto-detect)
+            auto_classify: Auto-detect classification from content
+
+        Returns:
+            Long-term storage result, or None if failed
+
+        """
+        if not self._short_term or not self._long_term:
+            logger.error("memory_backends_unavailable")
+            return None
+
+        # Retrieve staged pattern
+        staged_patterns = self.get_staged_patterns()
+        staged = next(
+            (p for p in staged_patterns if p.get("pattern_id") == staged_pattern_id),
+            None,
+        )
+
+        if not staged:
+            logger.warning("staged_pattern_not_found", pattern_id=staged_pattern_id)
+            return None
+
+        # Persist to long-term storage
+        # Content is stored in context dict by stage_pattern
+        context = staged.get("context", {})
+        content = context.get("content", "") or staged.get("description", "")
+        result = self.persist_pattern(
+            content=content,
+            pattern_type=staged.get("pattern_type", "general"),
+            classification=classification,
+            auto_classify=auto_classify,
+            metadata=context,
+        )
+
+        if result:
+            # Remove from staging (use promote_pattern which handles deletion)
+            try:
+                self._short_term.promote_pattern(staged_pattern_id, self.credentials)
+            except PermissionError:
+                # If we can't promote (delete from staging), just log it
+                logger.warning("could_not_remove_from_staging", pattern_id=staged_pattern_id)
+            logger.info(
+                "pattern_promoted",
+                staged_id=staged_pattern_id,
+                long_term_id=result.get("pattern_id"),
+            )
+
+        return result
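Taken together with the short-term mixin below, the promotion flow stages a pattern in working memory and then copies it into long-term storage. A minimal sketch of how a caller might drive it, assuming UnifiedMemory composes these mixins; the import path and constructor signature are assumptions, not part of this diff:

# Hypothetical usage; UnifiedMemory's import path and constructor are assumed.
from empathy_os.memory.unified import UnifiedMemory

memory = UnifiedMemory(user_id="agent-1")

# Stage a candidate pattern in short-term memory (ShortTermOperationsMixin)...
staged_id = memory.stage_pattern(
    {"name": "retry-backoff", "content": "Retry with exponential backoff", "confidence": 0.8},
    pattern_type="protocol",
)

# ...then promote it into long-term storage (PatternPromotionMixin).
if staged_id:
    result = memory.promote_pattern(staged_id, auto_classify=True)
    # result is the long-term record dict, or None if either backend is unavailable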
empathy_os/memory/mixins/short_term_mixin.py
ADDED

@@ -0,0 +1,182 @@
+"""Short-term memory operations mixin for UnifiedMemory.
+
+Provides working memory operations (stash/retrieve) and pattern staging.
+
+Copyright 2025 Smart AI Memory, LLC
+Licensed under Fair Source 0.9
+"""
+
+import uuid
+from datetime import datetime
+from typing import TYPE_CHECKING, Any
+
+import structlog
+
+if TYPE_CHECKING:
+    from ..file_session import FileSessionMemory
+    from ..redis_bootstrap import RedisStatus
+    from ..short_term import (
+        AccessTier,
+        AgentCredentials,
+        RedisShortTermMemory,
+    )
+
+logger = structlog.get_logger(__name__)
+
+
+class ShortTermOperationsMixin:
+    """Mixin providing short-term memory operations for UnifiedMemory."""
+
+    # Type hints for attributes that will be provided by UnifiedMemory
+    user_id: str
+    access_tier: "AccessTier"
+    _file_session: "FileSessionMemory | None"
+    _short_term: "RedisShortTermMemory | None"
+    _redis_status: "RedisStatus | None"
+    config: Any  # MemoryConfig
+
+    @property
+    def credentials(self) -> "AgentCredentials":
+        """Get agent credentials for short-term memory operations."""
+        from ..short_term import AgentCredentials
+
+        return AgentCredentials(agent_id=self.user_id, tier=self.access_tier)
+
+    # =========================================================================
+    # SHORT-TERM MEMORY OPERATIONS (Working Memory)
+    # =========================================================================
+
+    def stash(self, key: str, value: Any, ttl_seconds: int | None = None) -> bool:
+        """Store data in working memory with TTL.
+
+        Uses file-based session as primary storage, with optional Redis for
+        real-time features. Data is persisted to disk automatically.
+
+        Args:
+            key: Storage key
+            value: Data to store (must be JSON-serializable)
+            ttl_seconds: Time-to-live in seconds (default from config)
+
+        Returns:
+            True if stored successfully
+
+        """
+        from ..short_term import TTLStrategy
+
+        ttl = ttl_seconds or self.config.default_ttl_seconds
+
+        # Primary: File session memory (always available)
+        if self._file_session:
+            self._file_session.stash(key, value, ttl=ttl)
+
+        # Optional: Redis for real-time sync
+        if self._short_term and self._redis_status and self._redis_status.available:
+            # Map ttl_seconds to TTLStrategy
+            ttl_strategy = TTLStrategy.WORKING_RESULTS
+            if ttl_seconds is not None:
+                # COORDINATION removed in v5.0 - use SESSION for short-lived data
+                if ttl_seconds <= TTLStrategy.SESSION.value:
+                    ttl_strategy = TTLStrategy.SESSION
+                elif ttl_seconds <= TTLStrategy.WORKING_RESULTS.value:
+                    ttl_strategy = TTLStrategy.WORKING_RESULTS
+                elif ttl_seconds <= TTLStrategy.STAGED_PATTERNS.value:
+                    ttl_strategy = TTLStrategy.STAGED_PATTERNS
+                else:
+                    ttl_strategy = TTLStrategy.CONFLICT_CONTEXT
+
+            try:
+                self._short_term.stash(key, value, self.credentials, ttl_strategy)
+            except Exception as e:
+                logger.debug("redis_stash_failed", key=key, error=str(e))
+
+        # Return True if at least one backend succeeded
+        return self._file_session is not None
+
+    def retrieve(self, key: str) -> Any | None:
+        """Retrieve data from working memory.
+
+        Checks Redis first (if available) for faster access, then falls back
+        to file-based session storage.
+
+        Args:
+            key: Storage key
+
+        Returns:
+            Stored data or None if not found
+
+        """
+        # Try Redis first (faster, if available)
+        if self._short_term and self._redis_status and self._redis_status.available:
+            try:
+                result = self._short_term.retrieve(key, self.credentials)
+                if result is not None:
+                    return result
+            except Exception as e:
+                logger.debug("redis_retrieve_failed", key=key, error=str(e))
+
+        # Fall back to file session (primary storage)
+        if self._file_session:
+            return self._file_session.retrieve(key)
+
+        return None
+
+    # =========================================================================
+    # PATTERN STAGING
+    # =========================================================================
+
+    def stage_pattern(
+        self,
+        pattern_data: dict[str, Any],
+        pattern_type: str = "general",
+        ttl_hours: int = 24,
+    ) -> str | None:
+        """Stage a pattern for validation before long-term storage.
+
+        Args:
+            pattern_data: Pattern content and metadata
+            pattern_type: Type of pattern (algorithm, protocol, etc.)
+            ttl_hours: Hours before staged pattern expires (not used in current impl)
+
+        Returns:
+            Staged pattern ID or None if failed
+
+        """
+        from ..short_term import StagedPattern
+
+        if not self._short_term:
+            logger.warning("short_term_memory_unavailable")
+            return None
+
+        # Create a StagedPattern object from the pattern_data dict
+        pattern_id = f"staged_{uuid.uuid4().hex[:12]}"
+        staged_pattern = StagedPattern(
+            pattern_id=pattern_id,
+            agent_id=self.user_id,
+            pattern_type=pattern_type,
+            name=pattern_data.get("name", f"Pattern {pattern_id[:8]}"),
+            description=pattern_data.get("description", ""),
+            code=pattern_data.get("code"),
+            context=pattern_data.get("context", {}),
+            confidence=pattern_data.get("confidence", 0.5),
+            staged_at=datetime.now(),
+            interests=pattern_data.get("interests", []),
+        )
+        # Store content in context if provided
+        if "content" in pattern_data:
+            staged_pattern.context["content"] = pattern_data["content"]
+
+        success = self._short_term.stage_pattern(staged_pattern, self.credentials)
+        return pattern_id if success else None
+
+    def get_staged_patterns(self) -> list[dict]:
+        """Get all staged patterns awaiting validation.
+
+        Returns:
+            List of staged patterns with metadata
+
+        """
+        if not self._short_term:
+            return []
+
+        staged_list = self._short_term.list_staged_patterns(self.credentials)
+        return [p.to_dict() for p in staged_list]
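The TTL mapping in stash() picks the smallest TTLStrategy band that covers the requested ttl_seconds. A sketch of the working-memory round trip, reusing the hypothetical memory instance from the previous example (the concrete TTLStrategy values live in short_term.py and are not shown in this diff):

# Working-memory round trip; `memory` is the hypothetical instance from above.
memory.stash("active_task", {"step": 3}, ttl_seconds=900)

# retrieve() tries Redis first when available, then the file session.
state = memory.retrieve("active_task")
assert state == {"step": 3}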
empathy_os/memory/short_term.py
CHANGED
@@ -2034,6 +2034,9 @@ class RedisShortTermMemory:
                     f"Confidence {pattern.confidence} below threshold {min_confidence}",
                 )
             del self._mock_storage[key]
+            # Also invalidate local cache
+            if key in self._local_cache:
+                del self._local_cache[key]
             return True, pattern, "Pattern promoted successfully"
 
         if self._client is None:
@@ -2063,6 +2066,10 @@ class RedisShortTermMemory:
                 pipe.delete(key)
                 pipe.execute()
 
+                # Also invalidate local cache
+                if key in self._local_cache:
+                    del self._local_cache[key]
+
                 return True, pattern, "Pattern promoted successfully"
 
             except redis.WatchError:
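Both code paths now evict the promoted key from _local_cache at the same point they delete it from the backing store; otherwise a read-through cached copy could keep serving the staged pattern after promotion. A self-contained illustration of the hazard, with plain dicts standing in for the cache and store:

# Toy model of the stale-read hazard the hunks above fix.
cache = {"staged:abc": {"status": "staged"}}  # read-through local cache
store = {"staged:abc": {"status": "staged"}}  # Redis / mock storage

del store["staged:abc"]        # promotion deletes the staged entry...
cache.pop("staged:abc", None)  # ...so the cache must be evicted in the same step

assert "staged:abc" not in cache  # no stale hit on the next retrieve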
empathy_os/memory/simple_storage.py
ADDED

@@ -0,0 +1,302 @@
+"""Simplified long-term storage without security pipeline
+
+Provides basic CRUD operations for long-term persistent storage without
+the full security pipeline of SecureMemDocsIntegration. Suitable for
+simple storage needs where PII scrubbing and encryption are not required.
+
+Extracted from long_term.py for better modularity and testability.
+
+Features:
+- JSON file-based storage
+- Classification support (PUBLIC/INTERNAL/SENSITIVE)
+- Simple key-value interface
+- List keys by classification
+- Path validation for security
+
+Copyright 2025 Smart AI Memory, LLC
+Licensed under Fair Source 0.9
+"""
+
+import json
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+import structlog
+
+from empathy_os.config import _validate_file_path
+
+from .long_term_types import Classification
+
+logger = structlog.get_logger(__name__)
+
+
+class LongTermMemory:
+    """Simplified long-term persistent storage interface.
+
+    Provides basic CRUD operations for long-term memory storage without
+    the full security pipeline of SecureMemDocsIntegration. Suitable for
+    simple persistent storage needs where PII scrubbing and encryption
+    are not required.
+
+    Features:
+    - JSON file-based storage
+    - Classification support (PUBLIC/INTERNAL/SENSITIVE)
+    - Simple key-value interface
+    - List keys by classification
+
+    Example:
+        >>> memory = LongTermMemory(storage_path="./data")
+        >>> memory.store("config", {"setting": "value"}, classification="INTERNAL")
+        >>> data = memory.retrieve("config")
+        >>> keys = memory.list_keys(classification="INTERNAL")
+
+    Note:
+        For enterprise features (PII scrubbing, encryption, audit logging),
+        use SecureMemDocsIntegration instead.
+    """
+
+    def __init__(self, storage_path: str = "./long_term_storage"):
+        """Initialize long-term memory storage.
+
+        Args:
+            storage_path: Directory path for JSON storage
+
+        """
+        self.storage_path = Path(storage_path)
+        self.storage_path.mkdir(parents=True, exist_ok=True)
+        logger.info("long_term_memory_initialized", storage_path=str(self.storage_path))
+
+    def store(
+        self,
+        key: str,
+        data: Any,
+        classification: str | Classification | None = None,
+    ) -> bool:
+        """Store data in long-term memory.
+
+        Args:
+            key: Storage key
+            data: Data to store (must be JSON-serializable)
+            classification: Data classification (PUBLIC/INTERNAL/SENSITIVE)
+
+        Returns:
+            True if stored successfully, False otherwise
+
+        Raises:
+            ValueError: If key is empty or data is not JSON-serializable
+            TypeError: If data cannot be serialized to JSON
+
+        Example:
+            >>> memory = LongTermMemory()
+            >>> memory.store("user_prefs", {"theme": "dark"}, "INTERNAL")
+            True
+
+        """
+        if not key or not key.strip():
+            raise ValueError("key cannot be empty")
+
+        # Validate key for path traversal attacks
+        if ".." in key or key.startswith("/") or "\x00" in key:
+            logger.error("path_traversal_attempt", key=key)
+            return False
+
+        try:
+            # Convert classification to string
+            classification_str = "INTERNAL"  # Default
+            if classification is not None:
+                if isinstance(classification, Classification):
+                    classification_str = classification.value
+                elif isinstance(classification, str):
+                    # Validate classification string
+                    try:
+                        Classification[classification.upper()]
+                        classification_str = classification.upper()
+                    except KeyError:
+                        logger.warning(
+                            "invalid_classification",
+                            classification=classification,
+                            using_default="INTERNAL",
+                        )
+
+            # Create storage record
+            record = {
+                "key": key,
+                "data": data,
+                "classification": classification_str,
+                "created_at": datetime.utcnow().isoformat() + "Z",
+                "updated_at": datetime.utcnow().isoformat() + "Z",
+            }
+
+            # Store to JSON file
+            file_path = self.storage_path / f"{key}.json"
+            validated_file_path = _validate_file_path(str(file_path))
+            with validated_file_path.open("w", encoding="utf-8") as f:
+                json.dump(record, f, indent=2)
+
+            logger.debug("data_stored", key=key, classification=classification_str)
+            return True
+
+        except (TypeError, ValueError) as e:
+            logger.error("store_failed", key=key, error=str(e))
+            raise
+        except (OSError, PermissionError) as e:
+            logger.error("storage_io_error", key=key, error=str(e))
+            return False
+
+    def retrieve(self, key: str) -> Any | None:
+        """Retrieve data from long-term memory.
+
+        Args:
+            key: Storage key
+
+        Returns:
+            Stored data or None if not found
+
+        Raises:
+            ValueError: If key is empty
+
+        Example:
+            >>> memory = LongTermMemory()
+            >>> memory.store("config", {"value": 42})
+            >>> data = memory.retrieve("config")
+            >>> print(data["value"])
+            42
+
+        """
+        if not key or not key.strip():
+            raise ValueError("key cannot be empty")
+
+        try:
+            file_path = self.storage_path / f"{key}.json"
+
+            if not file_path.exists():
+                logger.debug("key_not_found", key=key)
+                return None
+
+            with file_path.open(encoding="utf-8") as f:
+                record = json.load(f)
+
+            logger.debug("data_retrieved", key=key)
+            return record.get("data")
+
+        except (OSError, PermissionError, json.JSONDecodeError) as e:
+            logger.error("retrieve_failed", key=key, error=str(e))
+            return None
+
+    def delete(self, key: str) -> bool:
+        """Delete data from long-term memory.
+
+        Args:
+            key: Storage key
+
+        Returns:
+            True if deleted, False if not found or error
+
+        Raises:
+            ValueError: If key is empty
+
+        Example:
+            >>> memory = LongTermMemory()
+            >>> memory.store("temp", {"data": "value"})
+            >>> memory.delete("temp")
+            True
+
+        """
+        if not key or not key.strip():
+            raise ValueError("key cannot be empty")
+
+        try:
+            file_path = self.storage_path / f"{key}.json"
+
+            if not file_path.exists():
+                logger.debug("key_not_found_for_deletion", key=key)
+                return False
+
+            file_path.unlink()
+            logger.info("data_deleted", key=key)
+            return True
+
+        except (OSError, PermissionError) as e:
+            logger.error("delete_failed", key=key, error=str(e))
+            return False
+
+    def list_keys(self, classification: str | Classification | None = None) -> list[str]:
+        """List all keys in long-term memory, optionally filtered by classification.
+
+        Args:
+            classification: Filter by classification (PUBLIC/INTERNAL/SENSITIVE)
+
+        Returns:
+            List of storage keys
+
+        Example:
+            >>> memory = LongTermMemory()
+            >>> memory.store("public_data", {"x": 1}, "PUBLIC")
+            >>> memory.store("internal_data", {"y": 2}, "INTERNAL")
+            >>> keys = memory.list_keys(classification="PUBLIC")
+            >>> print(keys)
+            ['public_data']
+
+        """
+        keys = []
+
+        # Convert classification to string if needed
+        filter_classification = None
+        if classification is not None:
+            if isinstance(classification, Classification):
+                filter_classification = classification.value
+            elif isinstance(classification, str):
+                try:
+                    Classification[classification.upper()]
+                    filter_classification = classification.upper()
+                except KeyError:
+                    logger.warning("invalid_classification_filter", classification=classification)
+                    return []
+
+        try:
+            for file_path in self.storage_path.glob("*.json"):
+                try:
+                    with file_path.open(encoding="utf-8") as f:
+                        record = json.load(f)
+
+                    # Apply classification filter if specified
+                    if filter_classification is not None:
+                        if record.get("classification") != filter_classification:
+                            continue
+
+                    keys.append(record.get("key", file_path.stem))
+
+                except (OSError, json.JSONDecodeError):
+                    continue
+
+        except (OSError, PermissionError) as e:
+            logger.error("list_keys_failed", error=str(e))
+
+        return keys
+
+    def clear(self) -> int:
+        """Clear all data from long-term memory.
+
+        Returns:
+            Number of keys deleted
+
+        Warning:
+            This operation cannot be undone!
+
+        """
+        count = 0
+        try:
+            for file_path in self.storage_path.glob("*.json"):
+                try:
+                    file_path.unlink()
+                    count += 1
+                except (OSError, PermissionError):
+                    continue
+
+            logger.warning("long_term_memory_cleared", count=count)
+            return count
+
+        except (OSError, PermissionError) as e:
+            logger.error("clear_failed", error=str(e))
+            return count