superlocalmemory 2.7.6 → 2.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +120 -155
- package/README.md +115 -89
- package/api_server.py +2 -12
- package/docs/PATTERN-LEARNING.md +64 -199
- package/docs/example_graph_usage.py +4 -6
- package/install.sh +59 -0
- package/mcp_server.py +83 -7
- package/package.json +1 -8
- package/scripts/generate-thumbnails.py +3 -5
- package/skills/slm-build-graph/SKILL.md +1 -1
- package/skills/slm-list-recent/SKILL.md +1 -1
- package/skills/slm-recall/SKILL.md +1 -1
- package/skills/slm-remember/SKILL.md +1 -1
- package/skills/slm-show-patterns/SKILL.md +1 -1
- package/skills/slm-status/SKILL.md +1 -1
- package/skills/slm-switch-profile/SKILL.md +1 -1
- package/src/agent_registry.py +7 -18
- package/src/auth_middleware.py +3 -5
- package/src/auto_backup.py +3 -7
- package/src/behavioral/__init__.py +49 -0
- package/src/behavioral/behavioral_listener.py +203 -0
- package/src/behavioral/behavioral_patterns.py +275 -0
- package/src/behavioral/cross_project_transfer.py +206 -0
- package/src/behavioral/outcome_inference.py +194 -0
- package/src/behavioral/outcome_tracker.py +193 -0
- package/src/behavioral/tests/__init__.py +4 -0
- package/src/behavioral/tests/test_behavioral_integration.py +108 -0
- package/src/behavioral/tests/test_behavioral_patterns.py +150 -0
- package/src/behavioral/tests/test_cross_project_transfer.py +142 -0
- package/src/behavioral/tests/test_mcp_behavioral.py +139 -0
- package/src/behavioral/tests/test_mcp_report_outcome.py +117 -0
- package/src/behavioral/tests/test_outcome_inference.py +107 -0
- package/src/behavioral/tests/test_outcome_tracker.py +96 -0
- package/src/cache_manager.py +4 -6
- package/src/compliance/__init__.py +48 -0
- package/src/compliance/abac_engine.py +149 -0
- package/src/compliance/abac_middleware.py +116 -0
- package/src/compliance/audit_db.py +215 -0
- package/src/compliance/audit_logger.py +148 -0
- package/src/compliance/retention_manager.py +289 -0
- package/src/compliance/retention_scheduler.py +186 -0
- package/src/compliance/tests/__init__.py +4 -0
- package/src/compliance/tests/test_abac_enforcement.py +95 -0
- package/src/compliance/tests/test_abac_engine.py +124 -0
- package/src/compliance/tests/test_abac_mcp_integration.py +118 -0
- package/src/compliance/tests/test_audit_db.py +123 -0
- package/src/compliance/tests/test_audit_logger.py +98 -0
- package/src/compliance/tests/test_mcp_audit.py +128 -0
- package/src/compliance/tests/test_mcp_retention_policy.py +125 -0
- package/src/compliance/tests/test_retention_manager.py +131 -0
- package/src/compliance/tests/test_retention_scheduler.py +99 -0
- package/src/db_connection_manager.py +2 -12
- package/src/embedding_engine.py +61 -669
- package/src/embeddings/__init__.py +47 -0
- package/src/embeddings/cache.py +70 -0
- package/src/embeddings/cli.py +113 -0
- package/src/embeddings/constants.py +47 -0
- package/src/embeddings/database.py +91 -0
- package/src/embeddings/engine.py +247 -0
- package/src/embeddings/model_loader.py +145 -0
- package/src/event_bus.py +3 -13
- package/src/graph/__init__.py +36 -0
- package/src/graph/build_helpers.py +74 -0
- package/src/graph/cli.py +87 -0
- package/src/graph/cluster_builder.py +188 -0
- package/src/graph/cluster_summary.py +148 -0
- package/src/graph/constants.py +47 -0
- package/src/graph/edge_builder.py +162 -0
- package/src/graph/entity_extractor.py +95 -0
- package/src/graph/graph_core.py +226 -0
- package/src/graph/graph_search.py +231 -0
- package/src/graph/hierarchical.py +207 -0
- package/src/graph/schema.py +99 -0
- package/src/graph_engine.py +45 -1451
- package/src/hnsw_index.py +3 -7
- package/src/hybrid_search.py +36 -683
- package/src/learning/__init__.py +27 -12
- package/src/learning/adaptive_ranker.py +50 -12
- package/src/learning/cross_project_aggregator.py +2 -12
- package/src/learning/engagement_tracker.py +2 -12
- package/src/learning/feature_extractor.py +175 -43
- package/src/learning/feedback_collector.py +7 -12
- package/src/learning/learning_db.py +180 -12
- package/src/learning/project_context_manager.py +2 -12
- package/src/learning/source_quality_scorer.py +2 -12
- package/src/learning/synthetic_bootstrap.py +2 -12
- package/src/learning/tests/__init__.py +2 -0
- package/src/learning/tests/test_adaptive_ranker.py +2 -6
- package/src/learning/tests/test_adaptive_ranker_v28.py +60 -0
- package/src/learning/tests/test_aggregator.py +2 -6
- package/src/learning/tests/test_auto_retrain_v28.py +35 -0
- package/src/learning/tests/test_e2e_ranking_v28.py +82 -0
- package/src/learning/tests/test_feature_extractor_v28.py +93 -0
- package/src/learning/tests/test_feedback_collector.py +2 -6
- package/src/learning/tests/test_learning_db.py +2 -6
- package/src/learning/tests/test_learning_db_v28.py +110 -0
- package/src/learning/tests/test_learning_init_v28.py +48 -0
- package/src/learning/tests/test_outcome_signals.py +48 -0
- package/src/learning/tests/test_project_context.py +2 -6
- package/src/learning/tests/test_schema_migration.py +319 -0
- package/src/learning/tests/test_signal_inference.py +11 -13
- package/src/learning/tests/test_source_quality.py +2 -6
- package/src/learning/tests/test_synthetic_bootstrap.py +3 -7
- package/src/learning/tests/test_workflow_miner.py +2 -6
- package/src/learning/workflow_pattern_miner.py +2 -12
- package/src/lifecycle/__init__.py +54 -0
- package/src/lifecycle/bounded_growth.py +239 -0
- package/src/lifecycle/compaction_engine.py +226 -0
- package/src/lifecycle/lifecycle_engine.py +302 -0
- package/src/lifecycle/lifecycle_evaluator.py +225 -0
- package/src/lifecycle/lifecycle_scheduler.py +130 -0
- package/src/lifecycle/retention_policy.py +285 -0
- package/src/lifecycle/tests/__init__.py +4 -0
- package/src/lifecycle/tests/test_bounded_growth.py +193 -0
- package/src/lifecycle/tests/test_compaction.py +179 -0
- package/src/lifecycle/tests/test_lifecycle_engine.py +137 -0
- package/src/lifecycle/tests/test_lifecycle_evaluation.py +177 -0
- package/src/lifecycle/tests/test_lifecycle_scheduler.py +127 -0
- package/src/lifecycle/tests/test_lifecycle_search.py +109 -0
- package/src/lifecycle/tests/test_mcp_compact.py +149 -0
- package/src/lifecycle/tests/test_mcp_lifecycle_status.py +114 -0
- package/src/lifecycle/tests/test_retention_policy.py +162 -0
- package/src/mcp_tools_v28.py +280 -0
- package/src/memory-profiles.py +2 -12
- package/src/memory-reset.py +2 -12
- package/src/memory_compression.py +2 -12
- package/src/memory_store_v2.py +76 -20
- package/src/migrate_v1_to_v2.py +2 -12
- package/src/pattern_learner.py +29 -975
- package/src/patterns/__init__.py +24 -0
- package/src/patterns/analyzers.py +247 -0
- package/src/patterns/learner.py +267 -0
- package/src/patterns/scoring.py +167 -0
- package/src/patterns/store.py +223 -0
- package/src/patterns/terminology.py +138 -0
- package/src/provenance_tracker.py +4 -14
- package/src/query_optimizer.py +4 -6
- package/src/rate_limiter.py +2 -6
- package/src/search/__init__.py +20 -0
- package/src/search/cli.py +77 -0
- package/src/search/constants.py +26 -0
- package/src/search/engine.py +239 -0
- package/src/search/fusion.py +122 -0
- package/src/search/index_loader.py +112 -0
- package/src/search/methods.py +162 -0
- package/src/search_engine_v2.py +4 -6
- package/src/setup_validator.py +7 -13
- package/src/subscription_manager.py +2 -12
- package/src/tree/__init__.py +59 -0
- package/src/tree/builder.py +183 -0
- package/src/tree/nodes.py +196 -0
- package/src/tree/queries.py +252 -0
- package/src/tree/schema.py +76 -0
- package/src/tree_manager.py +10 -711
- package/src/trust/__init__.py +45 -0
- package/src/trust/constants.py +66 -0
- package/src/trust/queries.py +157 -0
- package/src/trust/schema.py +95 -0
- package/src/trust/scorer.py +299 -0
- package/src/trust/signals.py +95 -0
- package/src/trust_scorer.py +39 -697
- package/src/webhook_dispatcher.py +2 -12
- package/ui/app.js +1 -1
- package/ui/js/agents.js +1 -1
- package/ui_server.py +2 -14
- package/ATTRIBUTION.md +0 -140
- package/docs/ARCHITECTURE-V2.5.md +0 -190
- package/docs/GRAPH-ENGINE.md +0 -503
- package/docs/architecture-diagram.drawio +0 -405
- package/docs/plans/2026-02-13-benchmark-suite.md +0 -1349
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Memory lifecycle state machine with formal transition rules.
|
|
4
|
+
|
|
5
|
+
State Machine:
|
|
6
|
+
ACTIVE -> WARM -> COLD -> ARCHIVED -> TOMBSTONED
|
|
7
|
+
|
|
8
|
+
Reactivation allowed from WARM, COLD, ARCHIVED back to ACTIVE.
|
|
9
|
+
TOMBSTONED is terminal (deletion only).
|
|
10
|
+
|
|
11
|
+
Each transition is recorded in lifecycle_history (JSON array) for auditability.
|
|
12
|
+
Thread-safe via threading.Lock() around read-modify-write operations.
|
|
13
|
+
"""
|
|
14
|
+
import sqlite3
|
|
15
|
+
import json
|
|
16
|
+
import threading
|
|
17
|
+
from datetime import datetime
|
|
18
|
+
from pathlib import Path
|
|
19
|
+
from typing import Optional, Dict, Any, List
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class LifecycleEngine:
    """Manages memory lifecycle states: ACTIVE -> WARM -> COLD -> ARCHIVED -> TOMBSTONED.

    Every transition is validated against the TRANSITIONS state machine,
    persisted to the `memories` table, and appended to the row's
    lifecycle_history JSON array for auditability. A threading.Lock
    serializes read-modify-write sequences within this process; EventBus
    emission is best-effort and never raises.
    """

    # All recognized lifecycle states, in progression order.
    STATES = ("active", "warm", "cold", "archived", "tombstoned")

    # Allowed transitions per state. Reactivation (back to "active") is
    # permitted from warm/cold/archived; "tombstoned" is terminal.
    TRANSITIONS = {
        "active": ["warm"],
        "warm": ["active", "cold"],
        "cold": ["active", "archived"],
        "archived": ["active", "tombstoned"],
        "tombstoned": [],  # Terminal state
    }

    def __init__(self, db_path: Optional[str] = None, config_path: Optional[str] = None):
        """
        Args:
            db_path: Path to memory.db. Defaults to ~/.claude-memory/memory.db.
            config_path: Optional config path, stored for parity with the
                other lifecycle components (not read by this class).
        """
        if db_path is None:
            db_path = Path.home() / ".claude-memory" / "memory.db"
        self._db_path = str(db_path)
        self._config_path = config_path
        self._lock = threading.Lock()

    def _get_connection(self) -> sqlite3.Connection:
        """Open a SQLite connection to memory.db with dict-style row access."""
        conn = sqlite3.connect(self._db_path)
        conn.row_factory = sqlite3.Row
        return conn

    def is_valid_transition(self, from_state: str, to_state: str) -> bool:
        """Check if a state transition is valid per the state machine.

        Args:
            from_state: Current lifecycle state
            to_state: Target lifecycle state

        Returns:
            True if the transition is allowed; False otherwise, including
            when from_state is not a recognized state.
        """
        if from_state not in self.TRANSITIONS:
            return False
        return to_state in self.TRANSITIONS[from_state]

    def get_memory_state(self, memory_id: int) -> Optional[str]:
        """Get the current lifecycle state of a memory.

        Args:
            memory_id: The memory's database ID

        Returns:
            The lifecycle state string ("active" when the column is NULL),
            or None if the memory does not exist.
        """
        conn = self._get_connection()
        try:
            row = conn.execute(
                "SELECT lifecycle_state FROM memories WHERE id = ?",
                (memory_id,),
            ).fetchone()
            if row is None:
                return None
            # Rows created before the lifecycle feature may have a NULL
            # state column; treat those as "active".
            return row["lifecycle_state"] or "active"
        finally:
            conn.close()

    def transition_memory(
        self,
        memory_id: int,
        to_state: str,
        reason: str = "",
    ) -> Dict[str, Any]:
        """Transition a memory to a new lifecycle state.

        Validates the transition against the state machine, updates the
        database, and appends an entry to the lifecycle_history JSON array.

        Args:
            memory_id: The memory's database ID
            to_state: Target lifecycle state
            reason: Human-readable reason for the transition

        Returns:
            Dict with success=True plus from_state/to_state/memory_id/
            reason/timestamp on success, or success=False plus "error".
        """
        with self._lock:
            conn = self._get_connection()
            try:
                row = conn.execute(
                    "SELECT lifecycle_state, lifecycle_history FROM memories WHERE id = ?",
                    (memory_id,),
                ).fetchone()

                if row is None:
                    return {"success": False, "error": f"Memory {memory_id} not found"}

                from_state = row["lifecycle_state"] or "active"

                if not self.is_valid_transition(from_state, to_state):
                    return {
                        "success": False,
                        "error": f"Invalid transition from '{from_state}' to '{to_state}'",
                    }

                now = datetime.now().isoformat()
                history = json.loads(row["lifecycle_history"] or "[]")
                history.append({
                    "from": from_state,
                    "to": to_state,
                    "reason": reason,
                    "timestamp": now,
                })

                conn.execute(
                    """UPDATE memories
                       SET lifecycle_state = ?,
                           lifecycle_updated_at = ?,
                           lifecycle_history = ?
                       WHERE id = ?""",
                    (to_state, now, json.dumps(history), memory_id),
                )
                conn.commit()

                # Best-effort notification; never affects the returned result.
                self._try_emit_event("lifecycle.transitioned", memory_id, {
                    "from_state": from_state,
                    "to_state": to_state,
                    "reason": reason,
                })

                return {
                    "success": True,
                    "from_state": from_state,
                    "to_state": to_state,
                    "memory_id": memory_id,
                    "reason": reason,
                    "timestamp": now,
                }
            finally:
                conn.close()

    def batch_transition(
        self,
        memory_ids: List[int],
        to_state: str,
        reasons: Optional[List[str]] = None,
    ) -> Dict[str, Any]:
        """Transition multiple memories in a single connection + commit.

        Validates each transition individually and skips invalid ones.
        Much faster than calling transition_memory() in a loop because it
        opens only one connection and commits once.

        Args:
            memory_ids: List of memory IDs to transition
            to_state: Target lifecycle state for all
            reasons: Per-memory reasons; missing entries default to ""

        Returns:
            Dict with succeeded (list), failed (list), and counts
        """
        if reasons is None:
            reasons = [""] * len(memory_ids)
        elif len(reasons) < len(memory_ids):
            # Bug fix: a short reasons list made zip() silently drop the
            # trailing memory IDs (neither transitioned nor reported as
            # failed). Pad with empty reasons so every ID is processed.
            reasons = list(reasons) + [""] * (len(memory_ids) - len(reasons))

        succeeded: List[Dict[str, Any]] = []
        failed: List[Dict[str, Any]] = []

        with self._lock:
            conn = self._get_connection()
            try:
                now = datetime.now().isoformat()

                for mem_id, reason in zip(memory_ids, reasons):
                    row = conn.execute(
                        "SELECT lifecycle_state, lifecycle_history "
                        "FROM memories WHERE id = ?",
                        (mem_id,),
                    ).fetchone()

                    if row is None:
                        failed.append({"memory_id": mem_id, "error": "not_found"})
                        continue

                    from_state = row["lifecycle_state"] or "active"
                    if not self.is_valid_transition(from_state, to_state):
                        failed.append({
                            "memory_id": mem_id,
                            "error": f"invalid_{from_state}_to_{to_state}",
                        })
                        continue

                    history = json.loads(row["lifecycle_history"] or "[]")
                    history.append({
                        "from": from_state,
                        "to": to_state,
                        "reason": reason,
                        "timestamp": now,
                    })

                    conn.execute(
                        """UPDATE memories
                           SET lifecycle_state = ?,
                               lifecycle_updated_at = ?,
                               lifecycle_history = ?
                           WHERE id = ?""",
                        (to_state, now, json.dumps(history), mem_id),
                    )
                    succeeded.append({
                        "memory_id": mem_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    })

                conn.commit()

                # Best-effort event emission for each transitioned memory
                for entry in succeeded:
                    self._try_emit_event(
                        "lifecycle.transitioned", entry["memory_id"], {
                            "from_state": entry["from_state"],
                            "to_state": entry["to_state"],
                            "reason": "batch",
                        },
                    )

                return {
                    "succeeded": succeeded,
                    "failed": failed,
                    "total": len(memory_ids),
                    "success_count": len(succeeded),
                    "fail_count": len(failed),
                }
            finally:
                conn.close()

    def reactivate_memory(
        self,
        memory_id: int,
        trigger: str = "",
    ) -> Dict[str, Any]:
        """Reactivate a non-active memory back to ACTIVE state.

        Convenience wrapper around transition_memory for reactivation.
        Valid from WARM, COLD, or ARCHIVED states.

        Args:
            memory_id: The memory's database ID
            trigger: What triggered reactivation (e.g., "recall", "explicit")

        Returns:
            Dict with success/failure status
        """
        return self.transition_memory(
            memory_id, "active", reason=f"reactivated:{trigger}"
        )

    def get_state_distribution(self) -> Dict[str, int]:
        """Get count of memories in each lifecycle state.

        Returns:
            Dict mapping state names to counts; every key in STATES is
            present (zero when no rows are in that state). NULL states
            are folded into "active".
        """
        conn = self._get_connection()
        try:
            dist = {state: 0 for state in self.STATES}
            rows = conn.execute(
                "SELECT lifecycle_state, COUNT(*) as cnt "
                "FROM memories GROUP BY lifecycle_state"
            ).fetchall()
            for row in rows:
                state = row["lifecycle_state"] if row["lifecycle_state"] else "active"
                if state in dist:
                    dist[state] = row["cnt"]
            return dist
        finally:
            conn.close()

    def _try_emit_event(
        self, event_type: str, memory_id: int, payload: dict
    ) -> None:
        """Best-effort EventBus emission. Fails silently if unavailable."""
        try:
            from event_bus import EventBus
            bus = EventBus.get_instance(Path(self._db_path))
            bus.emit(event_type, payload=payload, memory_id=memory_id)
        except Exception:
            pass
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Lifecycle evaluation rules — determines which memories should transition.
|
|
4
|
+
|
|
5
|
+
Evaluates memories against configurable thresholds based on:
|
|
6
|
+
- Time since last access (staleness)
|
|
7
|
+
- Importance score
|
|
8
|
+
- Current lifecycle state
|
|
9
|
+
|
|
10
|
+
Default rules:
|
|
11
|
+
ACTIVE -> WARM: no access >= 30 days AND importance <= 6
|
|
12
|
+
WARM -> COLD: no access >= 90 days AND importance <= 4
|
|
13
|
+
COLD -> ARCHIVED: no access >= 180 days (any importance)
|
|
14
|
+
|
|
15
|
+
Thresholds configurable via lifecycle_config.json.
|
|
16
|
+
"""
|
|
17
|
+
import sqlite3
|
|
18
|
+
import json
|
|
19
|
+
from datetime import datetime
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
from typing import Optional, Dict, Any, List, Set
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# Default evaluation thresholds. Each key names a (from -> to) transition;
# "no_access_days" is the minimum staleness in days and "max_importance"
# (when present) caps which memories qualify for demotion.
DEFAULT_EVAL_CONFIG: Dict[str, Dict[str, Any]] = {
    "active_to_warm": {
        "no_access_days": 30,
        "max_importance": 6,
    },
    "warm_to_cold": {
        "no_access_days": 90,
        "max_importance": 4,
    },
    "cold_to_archived": {
        "no_access_days": 180,
    },
}


class LifecycleEvaluator:
    """Evaluates memories for lifecycle state transitions.

    Scans memories and recommends transitions based on staleness and importance.
    Does NOT execute transitions — returns recommendations for the engine or
    scheduler to act on.
    """

    def __init__(
        self, db_path: Optional[str] = None, config_path: Optional[str] = None
    ):
        """
        Args:
            db_path: Path to memory.db. Defaults to ~/.claude-memory/memory.db.
            config_path: Optional path to lifecycle_config.json; when None,
                a file next to the database is used.
        """
        if db_path is None:
            db_path = str(Path.home() / ".claude-memory" / "memory.db")
        self._db_path = str(db_path)
        self._config_path = config_path

    def _get_connection(self) -> sqlite3.Connection:
        """Open a SQLite connection to memory.db with dict-style row access."""
        conn = sqlite3.connect(self._db_path)
        conn.row_factory = sqlite3.Row
        return conn

    def evaluate_memories(
        self,
        profile: Optional[str] = None,
        retention_overrides: Optional[Set[int]] = None,
    ) -> List[Dict[str, Any]]:
        """Scan all memories and return recommended transitions.

        Args:
            profile: Filter by profile (None = all profiles)
            retention_overrides: Set of memory IDs to skip (retention-protected)

        Returns:
            List of recommendation dicts with memory_id, from_state, to_state, reason
        """
        config = self._load_config()
        overrides = retention_overrides or set()

        conn = self._get_connection()
        try:
            # Bug fix: rows with a NULL lifecycle_state are treated as
            # "active" everywhere else in the lifecycle package, but a plain
            # IN(...) filter excluded them so they were never evaluated.
            query = (
                "SELECT id, lifecycle_state, importance, last_accessed, created_at "
                "FROM memories WHERE (lifecycle_state IN ('active', 'warm', 'cold') "
                "OR lifecycle_state IS NULL)"
            )
            params: List[Any] = []
            if profile:
                query += " AND profile = ?"
                params.append(profile)

            rows = conn.execute(query, params).fetchall()
            recommendations = []
            now = datetime.now()

            for row in rows:
                if row["id"] in overrides:
                    continue
                rec = self._evaluate_row(row, config, now)
                if rec:
                    recommendations.append(rec)

            return recommendations
        finally:
            conn.close()

    def evaluate_single(
        self,
        memory_id: int,
        retention_overrides: Optional[Set[int]] = None,
    ) -> Optional[Dict[str, Any]]:
        """Evaluate a single memory for potential transition.

        Args:
            memory_id: The memory's database ID
            retention_overrides: Set of memory IDs to skip

        Returns:
            Recommendation dict, or None if no transition recommended
            (or the memory is missing / retention-protected)
        """
        overrides = retention_overrides or set()
        if memory_id in overrides:
            return None

        config = self._load_config()
        conn = self._get_connection()
        try:
            row = conn.execute(
                "SELECT id, lifecycle_state, importance, last_accessed, created_at "
                "FROM memories WHERE id = ?",
                (memory_id,),
            ).fetchone()
            if row is None:
                return None
            return self._evaluate_row(row, config, datetime.now())
        finally:
            conn.close()

    def _evaluate_row(
        self, row: sqlite3.Row, config: Dict, now: datetime
    ) -> Optional[Dict[str, Any]]:
        """Evaluate a single memory row against transition rules."""
        state = row["lifecycle_state"] or "active"
        # Bug fix: `row["importance"] or 5` misread a legitimate importance
        # of 0 as "missing" and substituted the default 5; only a NULL
        # column means the score is absent.
        importance = row["importance"] if row["importance"] is not None else 5

        # Determine staleness: prefer last_accessed, fall back to created_at
        last_access_str = row["last_accessed"] or row["created_at"]
        if last_access_str:
            try:
                last_access = datetime.fromisoformat(str(last_access_str))
            except (ValueError, TypeError):
                last_access = now  # Unparseable -> treat as recent (safe default)
        else:
            last_access = now

        days_stale = (now - last_access).days

        if state == "active":
            rules = config.get("active_to_warm", {})
            threshold_days = rules.get("no_access_days", 30)
            max_importance = rules.get("max_importance", 6)
            if days_stale >= threshold_days and importance <= max_importance:
                return self._build_recommendation(
                    row["id"], "active", "warm", days_stale, importance
                )
        elif state == "warm":
            rules = config.get("warm_to_cold", {})
            threshold_days = rules.get("no_access_days", 90)
            max_importance = rules.get("max_importance", 4)
            if days_stale >= threshold_days and importance <= max_importance:
                return self._build_recommendation(
                    row["id"], "warm", "cold", days_stale, importance
                )
        elif state == "cold":
            rules = config.get("cold_to_archived", {})
            threshold_days = rules.get("no_access_days", 180)
            # Archival is staleness-only: any importance qualifies.
            if days_stale >= threshold_days:
                return self._build_recommendation(
                    row["id"], "cold", "archived", days_stale, importance
                )

        return None

    def _build_recommendation(
        self,
        memory_id: int,
        from_state: str,
        to_state: str,
        days_stale: int,
        importance: int,
    ) -> Dict[str, Any]:
        """Build a standardized recommendation dict.

        The reason string encodes staleness (and, for importance-gated
        transitions, the importance score) for auditability downstream.
        """
        reason = f"no_access_{days_stale}d"
        if to_state != "archived":
            reason += f"_importance_{importance}"
        return {
            "memory_id": memory_id,
            "from_state": from_state,
            "to_state": to_state,
            "reason": reason,
            "days_stale": days_stale,
            "importance": importance,
        }

    def _load_config(self) -> Dict[str, Any]:
        """Load lifecycle evaluation config from JSON. Returns defaults if missing.

        User-supplied per-transition dicts are merged over the defaults so a
        partial override keeps the remaining default thresholds; unknown
        top-level keys are passed through. Any error falls back to a deep
        copy of DEFAULT_EVAL_CONFIG.
        """
        try:
            if self._config_path:
                config_path = Path(self._config_path)
            else:
                config_path = Path(self._db_path).parent / "lifecycle_config.json"
            if config_path.exists():
                with open(config_path) as f:
                    user_config = json.load(f)
                merged: Dict[str, Any] = {}
                for key in DEFAULT_EVAL_CONFIG:
                    if key in user_config and isinstance(user_config[key], dict):
                        merged[key] = {**DEFAULT_EVAL_CONFIG[key], **user_config[key]}
                    else:
                        merged[key] = dict(DEFAULT_EVAL_CONFIG[key])
                for key in user_config:
                    if key not in merged:
                        merged[key] = user_config[key]
                return merged
        except Exception:
            pass
        return {k: dict(v) for k, v in DEFAULT_EVAL_CONFIG.items()}
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Background scheduler for periodic lifecycle evaluation and enforcement.
|
|
4
|
+
|
|
5
|
+
Runs on a configurable interval (default: 6 hours) to:
|
|
6
|
+
1. Evaluate all memories for lifecycle transitions
|
|
7
|
+
2. Execute recommended transitions
|
|
8
|
+
3. Enforce bounded growth limits
|
|
9
|
+
|
|
10
|
+
Uses daemon threading — does not prevent process exit.
|
|
11
|
+
"""
|
|
12
|
+
import threading
|
|
13
|
+
from datetime import datetime
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Optional, Dict, Any, List
|
|
16
|
+
|
|
17
|
+
from .lifecycle_engine import LifecycleEngine
|
|
18
|
+
from .lifecycle_evaluator import LifecycleEvaluator
|
|
19
|
+
from .bounded_growth import BoundedGrowthEnforcer
|
|
20
|
+
|
|
21
|
+
# Default interval: 6 hours
DEFAULT_INTERVAL_SECONDS = 21600


class LifecycleScheduler:
    """Periodic driver for lifecycle maintenance.

    On each cycle the scheduler asks the evaluator for recommended
    transitions, applies them through the engine, and then runs the
    bounded-growth enforcer. All timers are daemonized so the scheduler
    never keeps the process alive.
    """

    def __init__(
        self,
        db_path: Optional[str] = None,
        config_path: Optional[str] = None,
        interval_seconds: int = DEFAULT_INTERVAL_SECONDS,
    ):
        """
        Args:
            db_path: Path to memory.db. Defaults to ~/.claude-memory/memory.db.
            config_path: Optional config path forwarded to all collaborators.
            interval_seconds: Seconds between evaluation cycles.
        """
        resolved = Path.home() / ".claude-memory" / "memory.db" if db_path is None else db_path
        self._db_path = str(resolved)
        self._config_path = config_path
        self.interval_seconds = interval_seconds

        # All collaborators operate on the same database and config.
        self._engine = LifecycleEngine(self._db_path, config_path=config_path)
        self._evaluator = LifecycleEvaluator(self._db_path, config_path=config_path)
        self._enforcer = BoundedGrowthEnforcer(self._db_path, config_path=config_path)

        self._timer: Optional[threading.Timer] = None
        self._running = False
        self._lock = threading.Lock()

    @property
    def is_running(self) -> bool:
        """True while the background timer loop is active."""
        return self._running

    def start(self) -> None:
        """Begin periodic evaluation; no-op if already started."""
        with self._lock:
            if not self._running:
                self._running = True
                self._schedule_next()

    def stop(self) -> None:
        """Halt the timer loop and cancel any pending timer."""
        with self._lock:
            self._running = False
            pending, self._timer = self._timer, None
            if pending is not None:
                pending.cancel()

    def run_now(self) -> Dict[str, Any]:
        """Run one evaluation + enforcement cycle synchronously.

        Returns:
            Dict with evaluation results, enforcement results, and timestamp
        """
        return self._execute_cycle()

    def _schedule_next(self) -> None:
        """Arm a daemon timer that fires the next cycle."""
        timer = threading.Timer(self.interval_seconds, self._run_cycle)
        timer.daemon = True
        self._timer = timer
        timer.start()

    def _run_cycle(self) -> None:
        """Timer callback: run one cycle, then re-arm if still running."""
        try:
            self._execute_cycle()
        except Exception:
            pass  # Scheduler must not crash
        finally:
            with self._lock:
                if self._running:
                    self._schedule_next()

    def _execute_cycle(self) -> Dict[str, Any]:
        """Core cycle body: evaluate, transition, then enforce bounds."""
        # Step 1: ask the evaluator for recommended transitions.
        recommendations = self._evaluator.evaluate_memories()

        # Step 2: apply every recommendation through the engine, keeping
        # each per-memory outcome (success or failure) for the report.
        transition_results: List[Dict] = [
            self._engine.transition_memory(
                rec["memory_id"], rec["to_state"], reason=rec["reason"]
            )
            for rec in recommendations
        ]
        transitioned = sum(1 for outcome in transition_results if outcome.get("success"))

        # Step 3: enforce bounded growth limits.
        enforcement = self._enforcer.enforce_bounds()

        return {
            "timestamp": datetime.now().isoformat(),
            "evaluation": {
                "recommendations": recommendations,
                "transitioned": transitioned,
                "transition_results": transition_results,
            },
            "enforcement": enforcement,
        }
|