superlocalmemory 2.7.6 → 2.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +120 -155
- package/README.md +115 -89
- package/api_server.py +2 -12
- package/docs/PATTERN-LEARNING.md +64 -199
- package/docs/example_graph_usage.py +4 -6
- package/install.sh +59 -0
- package/mcp_server.py +83 -7
- package/package.json +1 -8
- package/scripts/generate-thumbnails.py +3 -5
- package/skills/slm-build-graph/SKILL.md +1 -1
- package/skills/slm-list-recent/SKILL.md +1 -1
- package/skills/slm-recall/SKILL.md +1 -1
- package/skills/slm-remember/SKILL.md +1 -1
- package/skills/slm-show-patterns/SKILL.md +1 -1
- package/skills/slm-status/SKILL.md +1 -1
- package/skills/slm-switch-profile/SKILL.md +1 -1
- package/src/agent_registry.py +7 -18
- package/src/auth_middleware.py +3 -5
- package/src/auto_backup.py +3 -7
- package/src/behavioral/__init__.py +49 -0
- package/src/behavioral/behavioral_listener.py +203 -0
- package/src/behavioral/behavioral_patterns.py +275 -0
- package/src/behavioral/cross_project_transfer.py +206 -0
- package/src/behavioral/outcome_inference.py +194 -0
- package/src/behavioral/outcome_tracker.py +193 -0
- package/src/behavioral/tests/__init__.py +4 -0
- package/src/behavioral/tests/test_behavioral_integration.py +108 -0
- package/src/behavioral/tests/test_behavioral_patterns.py +150 -0
- package/src/behavioral/tests/test_cross_project_transfer.py +142 -0
- package/src/behavioral/tests/test_mcp_behavioral.py +139 -0
- package/src/behavioral/tests/test_mcp_report_outcome.py +117 -0
- package/src/behavioral/tests/test_outcome_inference.py +107 -0
- package/src/behavioral/tests/test_outcome_tracker.py +96 -0
- package/src/cache_manager.py +4 -6
- package/src/compliance/__init__.py +48 -0
- package/src/compliance/abac_engine.py +149 -0
- package/src/compliance/abac_middleware.py +116 -0
- package/src/compliance/audit_db.py +215 -0
- package/src/compliance/audit_logger.py +148 -0
- package/src/compliance/retention_manager.py +289 -0
- package/src/compliance/retention_scheduler.py +186 -0
- package/src/compliance/tests/__init__.py +4 -0
- package/src/compliance/tests/test_abac_enforcement.py +95 -0
- package/src/compliance/tests/test_abac_engine.py +124 -0
- package/src/compliance/tests/test_abac_mcp_integration.py +118 -0
- package/src/compliance/tests/test_audit_db.py +123 -0
- package/src/compliance/tests/test_audit_logger.py +98 -0
- package/src/compliance/tests/test_mcp_audit.py +128 -0
- package/src/compliance/tests/test_mcp_retention_policy.py +125 -0
- package/src/compliance/tests/test_retention_manager.py +131 -0
- package/src/compliance/tests/test_retention_scheduler.py +99 -0
- package/src/db_connection_manager.py +2 -12
- package/src/embedding_engine.py +61 -669
- package/src/embeddings/__init__.py +47 -0
- package/src/embeddings/cache.py +70 -0
- package/src/embeddings/cli.py +113 -0
- package/src/embeddings/constants.py +47 -0
- package/src/embeddings/database.py +91 -0
- package/src/embeddings/engine.py +247 -0
- package/src/embeddings/model_loader.py +145 -0
- package/src/event_bus.py +3 -13
- package/src/graph/__init__.py +36 -0
- package/src/graph/build_helpers.py +74 -0
- package/src/graph/cli.py +87 -0
- package/src/graph/cluster_builder.py +188 -0
- package/src/graph/cluster_summary.py +148 -0
- package/src/graph/constants.py +47 -0
- package/src/graph/edge_builder.py +162 -0
- package/src/graph/entity_extractor.py +95 -0
- package/src/graph/graph_core.py +226 -0
- package/src/graph/graph_search.py +231 -0
- package/src/graph/hierarchical.py +207 -0
- package/src/graph/schema.py +99 -0
- package/src/graph_engine.py +45 -1451
- package/src/hnsw_index.py +3 -7
- package/src/hybrid_search.py +36 -683
- package/src/learning/__init__.py +27 -12
- package/src/learning/adaptive_ranker.py +50 -12
- package/src/learning/cross_project_aggregator.py +2 -12
- package/src/learning/engagement_tracker.py +2 -12
- package/src/learning/feature_extractor.py +175 -43
- package/src/learning/feedback_collector.py +7 -12
- package/src/learning/learning_db.py +180 -12
- package/src/learning/project_context_manager.py +2 -12
- package/src/learning/source_quality_scorer.py +2 -12
- package/src/learning/synthetic_bootstrap.py +2 -12
- package/src/learning/tests/__init__.py +2 -0
- package/src/learning/tests/test_adaptive_ranker.py +2 -6
- package/src/learning/tests/test_adaptive_ranker_v28.py +60 -0
- package/src/learning/tests/test_aggregator.py +2 -6
- package/src/learning/tests/test_auto_retrain_v28.py +35 -0
- package/src/learning/tests/test_e2e_ranking_v28.py +82 -0
- package/src/learning/tests/test_feature_extractor_v28.py +93 -0
- package/src/learning/tests/test_feedback_collector.py +2 -6
- package/src/learning/tests/test_learning_db.py +2 -6
- package/src/learning/tests/test_learning_db_v28.py +110 -0
- package/src/learning/tests/test_learning_init_v28.py +48 -0
- package/src/learning/tests/test_outcome_signals.py +48 -0
- package/src/learning/tests/test_project_context.py +2 -6
- package/src/learning/tests/test_schema_migration.py +319 -0
- package/src/learning/tests/test_signal_inference.py +11 -13
- package/src/learning/tests/test_source_quality.py +2 -6
- package/src/learning/tests/test_synthetic_bootstrap.py +3 -7
- package/src/learning/tests/test_workflow_miner.py +2 -6
- package/src/learning/workflow_pattern_miner.py +2 -12
- package/src/lifecycle/__init__.py +54 -0
- package/src/lifecycle/bounded_growth.py +239 -0
- package/src/lifecycle/compaction_engine.py +226 -0
- package/src/lifecycle/lifecycle_engine.py +302 -0
- package/src/lifecycle/lifecycle_evaluator.py +225 -0
- package/src/lifecycle/lifecycle_scheduler.py +130 -0
- package/src/lifecycle/retention_policy.py +285 -0
- package/src/lifecycle/tests/__init__.py +4 -0
- package/src/lifecycle/tests/test_bounded_growth.py +193 -0
- package/src/lifecycle/tests/test_compaction.py +179 -0
- package/src/lifecycle/tests/test_lifecycle_engine.py +137 -0
- package/src/lifecycle/tests/test_lifecycle_evaluation.py +177 -0
- package/src/lifecycle/tests/test_lifecycle_scheduler.py +127 -0
- package/src/lifecycle/tests/test_lifecycle_search.py +109 -0
- package/src/lifecycle/tests/test_mcp_compact.py +149 -0
- package/src/lifecycle/tests/test_mcp_lifecycle_status.py +114 -0
- package/src/lifecycle/tests/test_retention_policy.py +162 -0
- package/src/mcp_tools_v28.py +280 -0
- package/src/memory-profiles.py +2 -12
- package/src/memory-reset.py +2 -12
- package/src/memory_compression.py +2 -12
- package/src/memory_store_v2.py +76 -20
- package/src/migrate_v1_to_v2.py +2 -12
- package/src/pattern_learner.py +29 -975
- package/src/patterns/__init__.py +24 -0
- package/src/patterns/analyzers.py +247 -0
- package/src/patterns/learner.py +267 -0
- package/src/patterns/scoring.py +167 -0
- package/src/patterns/store.py +223 -0
- package/src/patterns/terminology.py +138 -0
- package/src/provenance_tracker.py +4 -14
- package/src/query_optimizer.py +4 -6
- package/src/rate_limiter.py +2 -6
- package/src/search/__init__.py +20 -0
- package/src/search/cli.py +77 -0
- package/src/search/constants.py +26 -0
- package/src/search/engine.py +239 -0
- package/src/search/fusion.py +122 -0
- package/src/search/index_loader.py +112 -0
- package/src/search/methods.py +162 -0
- package/src/search_engine_v2.py +4 -6
- package/src/setup_validator.py +7 -13
- package/src/subscription_manager.py +2 -12
- package/src/tree/__init__.py +59 -0
- package/src/tree/builder.py +183 -0
- package/src/tree/nodes.py +196 -0
- package/src/tree/queries.py +252 -0
- package/src/tree/schema.py +76 -0
- package/src/tree_manager.py +10 -711
- package/src/trust/__init__.py +45 -0
- package/src/trust/constants.py +66 -0
- package/src/trust/queries.py +157 -0
- package/src/trust/schema.py +95 -0
- package/src/trust/scorer.py +299 -0
- package/src/trust/signals.py +95 -0
- package/src/trust_scorer.py +39 -697
- package/src/webhook_dispatcher.py +2 -12
- package/ui/app.js +1 -1
- package/ui/js/agents.js +1 -1
- package/ui_server.py +2 -14
- package/ATTRIBUTION.md +0 -140
- package/docs/ARCHITECTURE-V2.5.md +0 -190
- package/docs/GRAPH-ENGINE.md +0 -503
- package/docs/architecture-diagram.drawio +0 -405
- package/docs/plans/2026-02-13-benchmark-suite.md +0 -1349
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for lifecycle state machine transitions.
|
|
4
|
+
"""
|
|
5
|
+
import sqlite3
|
|
6
|
+
import tempfile
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
import json
|
|
10
|
+
import pytest
|
|
11
|
+
|
|
12
|
+
# Ensure src/ is importable and takes precedence (matches existing test pattern)
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
SRC_DIR = Path(__file__).resolve().parent.parent.parent # src/
|
|
15
|
+
_src_str = str(SRC_DIR)
|
|
16
|
+
if _src_str not in sys.path:
|
|
17
|
+
sys.path.insert(0, _src_str)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class TestLifecycleStates:
    """Exercise the lifecycle state machine: the state set, transition
    rules, DB persistence of transitions, and history bookkeeping."""

    def setup_method(self):
        # Throwaway SQLite DB with the v2.8 memories schema plus one
        # seed row in the 'active' state (id=1).
        self.db_fd, self.db_path = tempfile.mkstemp(suffix=".db")
        db = sqlite3.connect(self.db_path)
        db.execute("""
            CREATE TABLE memories (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                content TEXT NOT NULL,
                importance INTEGER DEFAULT 5,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_accessed TIMESTAMP,
                access_count INTEGER DEFAULT 0,
                lifecycle_state TEXT DEFAULT 'active',
                lifecycle_updated_at TIMESTAMP,
                lifecycle_history TEXT DEFAULT '[]',
                access_level TEXT DEFAULT 'public',
                profile TEXT DEFAULT 'default'
            )
        """)
        db.execute("""
            INSERT INTO memories (content, importance, lifecycle_state)
            VALUES ('test memory', 5, 'active')
        """)
        db.commit()
        db.close()

    def teardown_method(self):
        # Release the mkstemp handle first, then remove the DB file.
        os.close(self.db_fd)
        os.unlink(self.db_path)

    def _engine(self):
        """Lazily import (after the module-level sys.path setup) and
        return a LifecycleEngine bound to the temp DB."""
        from lifecycle.lifecycle_engine import LifecycleEngine
        return LifecycleEngine(self.db_path)

    def test_valid_states(self):
        expected = {"active", "warm", "cold", "archived", "tombstoned"}
        assert set(self._engine().STATES) == expected

    def test_valid_transition_active_to_warm(self):
        assert self._engine().is_valid_transition("active", "warm") is True

    def test_invalid_transition_active_to_archived(self):
        # Skipping intermediate states is not allowed.
        assert self._engine().is_valid_transition("active", "archived") is False

    def test_reactivation_always_valid(self):
        engine = self._engine()
        # Every dormant state may snap straight back to active.
        for dormant in ("warm", "cold", "archived"):
            assert engine.is_valid_transition(dormant, "active") is True

    def test_tombstoned_is_terminal(self):
        engine = self._engine()
        # No edge leaves the tombstoned state.
        for target in engine.STATES:
            if target == "tombstoned":
                continue
            assert engine.is_valid_transition("tombstoned", target) is False

    def test_transition_memory(self):
        outcome = self._engine().transition_memory(1, "warm", reason="no_access_30d")
        assert outcome["success"] is True
        assert outcome["from_state"] == "active"
        assert outcome["to_state"] == "warm"

    def test_transition_updates_db(self):
        self._engine().transition_memory(1, "warm", reason="no_access_30d")
        db = sqlite3.connect(self.db_path)
        row = db.execute("SELECT lifecycle_state FROM memories WHERE id=1").fetchone()
        db.close()
        assert row[0] == "warm"

    def test_transition_records_history(self):
        self._engine().transition_memory(1, "warm", reason="no_access_30d")
        db = sqlite3.connect(self.db_path)
        row = db.execute("SELECT lifecycle_history FROM memories WHERE id=1").fetchone()
        db.close()
        entries = json.loads(row[0])
        assert len(entries) == 1
        assert entries[0]["from"] == "active"
        assert entries[0]["to"] == "warm"
        assert entries[0]["reason"] == "no_access_30d"

    def test_invalid_transition_rejected(self):
        outcome = self._engine().transition_memory(1, "archived", reason="skip")
        assert outcome["success"] is False
        assert "invalid" in outcome["error"].lower()

    def test_get_memory_state(self):
        assert self._engine().get_memory_state(1) == "active"

    def test_get_state_distribution(self):
        dist = self._engine().get_state_distribution()
        assert dist["active"] >= 1
        assert dist["warm"] == 0

    def test_reactivation_on_access(self):
        engine = self._engine()
        engine.transition_memory(1, "warm", reason="aged")
        outcome = engine.reactivate_memory(1, trigger="recall")
        assert outcome["success"] is True
        assert outcome["from_state"] == "warm"
        assert outcome["to_state"] == "active"
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for lifecycle evaluation rules — which memories should transition.
|
|
4
|
+
"""
|
|
5
|
+
import sqlite3
|
|
6
|
+
import tempfile
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
import json
|
|
10
|
+
from datetime import datetime, timedelta
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class TestLifecycleEvaluation:
    """Verify the evaluation rules deciding which memories should move
    between lifecycle states (staleness vs. importance)."""

    def setup_method(self):
        # An isolated temp dir keeps both the DB and any config files local.
        self.tmp_dir = tempfile.mkdtemp()
        self.db_path = os.path.join(self.tmp_dir, "test.db")
        db = sqlite3.connect(self.db_path)
        db.execute("""
            CREATE TABLE memories (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                content TEXT NOT NULL,
                importance INTEGER DEFAULT 5,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_accessed TIMESTAMP,
                access_count INTEGER DEFAULT 0,
                lifecycle_state TEXT DEFAULT 'active',
                lifecycle_updated_at TIMESTAMP,
                lifecycle_history TEXT DEFAULT '[]',
                access_level TEXT DEFAULT 'public',
                profile TEXT DEFAULT 'default'
            )
        """)
        now = datetime.now()

        def ago(days):
            # ISO timestamp `days` days in the past.
            return (now - timedelta(days=days)).isoformat()

        # Rows are (content, importance, state, last_accessed, created_at):
        fixtures = [
            # 1: active, stale 35d, importance 5 -> expect WARM
            ("stale low importance", 5, "active", ago(35), ago(100)),
            # 2: active, accessed 10d ago -> expect no change
            ("recent access", 5, "active", ago(10), ago(100)),
            # 3: active, stale 35d but importance 8 -> importance resists
            ("stale high importance", 8, "active", ago(35), ago(100)),
            # 4: warm, stale 95d, importance 3 -> expect COLD
            ("warm stale", 3, "warm", ago(95), ago(200)),
            # 5: cold, stale 200d -> expect ARCHIVED
            ("cold very stale", 5, "cold", ago(200), ago(300)),
            # 6: never accessed, created 40d ago -> falls back to created_at
            ("never accessed", 4, "active", None, ago(40)),
        ]
        db.executemany(
            "INSERT INTO memories (content, importance, lifecycle_state, "
            "last_accessed, created_at) VALUES (?, ?, ?, ?, ?)",
            fixtures,
        )
        db.commit()
        db.close()

    def teardown_method(self):
        import shutil
        shutil.rmtree(self.tmp_dir, ignore_errors=True)

    def _evaluator(self, config_path=None):
        """Lazily import and construct an evaluator on the temp DB."""
        from lifecycle.lifecycle_evaluator import LifecycleEvaluator
        if config_path is None:
            return LifecycleEvaluator(self.db_path)
        return LifecycleEvaluator(self.db_path, config_path=config_path)

    def test_active_to_warm_stale_low_importance(self):
        """Memory 1: stale 35d, importance 5 → recommend ACTIVE→WARM."""
        rec = self._evaluator().evaluate_single(1)
        assert rec is not None
        assert rec["from_state"] == "active"
        assert rec["to_state"] == "warm"
        assert rec["memory_id"] == 1

    def test_active_stays_recent_access(self):
        """Memory 2: accessed 10d ago → no transition recommended."""
        assert self._evaluator().evaluate_single(2) is None

    def test_active_stays_high_importance(self):
        """Memory 3: importance 8 resists transition even when stale."""
        assert self._evaluator().evaluate_single(3) is None

    def test_warm_to_cold_stale(self):
        """Memory 4: warm, stale 95d, importance 3 → recommend COLD."""
        rec = self._evaluator().evaluate_single(4)
        assert rec is not None
        assert rec["from_state"] == "warm"
        assert rec["to_state"] == "cold"

    def test_cold_to_archived(self):
        """Memory 5: cold, stale 200d → recommend ARCHIVED."""
        rec = self._evaluator().evaluate_single(5)
        assert rec is not None
        assert rec["from_state"] == "cold"
        assert rec["to_state"] == "archived"

    def test_never_accessed_uses_created_at(self):
        """Memory 6: NULL last_accessed, created 40d ago → recommend WARM."""
        rec = self._evaluator().evaluate_single(6)
        assert rec is not None
        assert rec["to_state"] == "warm"

    def test_retention_override_skips_memory(self):
        """Memory 1 is skipped when listed in retention_overrides."""
        assert self._evaluator().evaluate_single(1, retention_overrides={1}) is None

    def test_evaluate_memories_returns_recommendations(self):
        """Full scan returns recommendations for every eligible memory."""
        recs = self._evaluator().evaluate_memories()
        # Expect at least memories 1 (active→warm), 4 (warm→cold), 5 (cold→archived).
        assert isinstance(recs, list)
        assert len(recs) >= 3
        flagged = {entry["memory_id"] for entry in recs}
        assert 1 in flagged  # stale active
        assert 4 in flagged  # stale warm
        assert 5 in flagged  # stale cold

    def test_evaluate_memories_excludes_retained(self):
        """retention_overrides removes those memory IDs from the scan."""
        recs = self._evaluator().evaluate_memories(retention_overrides={1, 4})
        flagged = {entry["memory_id"] for entry in recs}
        assert 1 not in flagged
        assert 4 not in flagged
        assert 5 in flagged  # cold→archived not overridden

    def test_custom_config_thresholds(self):
        """A custom config file overrides the default thresholds."""
        cfg = os.path.join(self.tmp_dir, "lifecycle_config.json")
        with open(cfg, "w") as fh:
            json.dump(
                {"active_to_warm": {"no_access_days": 50, "max_importance": 6}},
                fh,
            )
        # Memory 1 is only 35d stale — below the raised 50d bar.
        assert self._evaluator(config_path=cfg).evaluate_single(1) is None

    def test_evaluate_single_nonexistent_memory(self):
        """Evaluating a nonexistent memory id returns None."""
        assert self._evaluator().evaluate_single(999) is None
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for lifecycle background scheduler.
|
|
4
|
+
"""
|
|
5
|
+
import sqlite3
|
|
6
|
+
import tempfile
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
import time
|
|
10
|
+
import threading
|
|
11
|
+
from datetime import datetime, timedelta
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
|
|
14
|
+
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class TestLifecycleScheduler:
    """Validate the background scheduler that periodically evaluates
    and enforces lifecycle transitions."""

    def setup_method(self):
        self.tmp_dir = tempfile.mkdtemp()
        self.db_path = os.path.join(self.tmp_dir, "test.db")
        db = sqlite3.connect(self.db_path)
        db.execute("""
            CREATE TABLE memories (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                content TEXT NOT NULL,
                importance INTEGER DEFAULT 5,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                last_accessed TIMESTAMP,
                access_count INTEGER DEFAULT 0,
                lifecycle_state TEXT DEFAULT 'active',
                lifecycle_updated_at TIMESTAMP,
                lifecycle_history TEXT DEFAULT '[]',
                access_level TEXT DEFAULT 'public',
                profile TEXT DEFAULT 'default'
            )
        """)
        now = datetime.now()
        insert = ("INSERT INTO memories (content, importance, lifecycle_state, "
                  "last_accessed, created_at) VALUES (?, ?, ?, ?, ?)")
        # id=1: stale 45d, low importance — eligible for a transition.
        db.execute(insert, ("stale memory", 3, "active",
                            (now - timedelta(days=45)).isoformat(),
                            (now - timedelta(days=100)).isoformat()))
        # id=2: just accessed, high importance — must remain active.
        db.execute(insert, ("fresh memory", 8, "active",
                            now.isoformat(),
                            (now - timedelta(days=10)).isoformat()))
        db.commit()
        db.close()

    def teardown_method(self):
        import shutil
        shutil.rmtree(self.tmp_dir, ignore_errors=True)

    def _scheduler(self, **kwargs):
        """Lazily import and construct a scheduler on the temp DB."""
        from lifecycle.lifecycle_scheduler import LifecycleScheduler
        return LifecycleScheduler(self.db_path, **kwargs)

    def test_scheduler_creation(self):
        """Scheduler constructs with defaults."""
        sched = self._scheduler()
        assert sched is not None
        assert sched.interval_seconds == 21600  # 6-hour default

    def test_run_now_executes_evaluation(self):
        """Manual trigger performs both evaluation and enforcement."""
        outcome = self._scheduler().run_now()
        assert outcome is not None
        assert "evaluation" in outcome
        assert "enforcement" in outcome

    def test_run_now_transitions_stale_memories(self):
        """run_now transitions memories that were recommended."""
        outcome = self._scheduler().run_now()
        evaluation = outcome["evaluation"]
        # Memory 1 (stale 45d, importance 3) should have been flagged.
        if evaluation["recommendations"]:
            assert evaluation["transitioned"] >= 1

    def test_fresh_memory_stays_active(self):
        """Fresh high-importance memory is NOT transitioned."""
        self._scheduler().run_now()
        db = sqlite3.connect(self.db_path)
        row = db.execute("SELECT lifecycle_state FROM memories WHERE id=2").fetchone()
        db.close()
        assert row[0] == "active"

    def test_scheduler_thread_is_daemon(self):
        """Scheduler timer must be daemonic so it never blocks exit."""
        sched = self._scheduler(interval_seconds=3600)
        sched.start()
        assert sched._timer is not None
        assert sched._timer.daemon is True
        sched.stop()

    def test_start_and_stop(self):
        """is_running tracks start()/stop()."""
        sched = self._scheduler(interval_seconds=3600)
        sched.start()
        assert sched.is_running is True
        sched.stop()
        assert sched.is_running is False

    def test_configurable_interval(self):
        """Interval is caller-configurable."""
        assert self._scheduler(interval_seconds=7200).interval_seconds == 7200

    def test_result_structure(self):
        """run_now returns the documented result shape."""
        outcome = self._scheduler().run_now()
        for key in ("evaluation", "enforcement", "timestamp"):
            assert key in outcome
        for key in ("recommendations", "transitioned"):
            assert key in outcome["evaluation"]
@@ -0,0 +1,109 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for lifecycle-aware search filtering.
|
|
4
|
+
"""
|
|
5
|
+
import sqlite3
|
|
6
|
+
import tempfile
|
|
7
|
+
import os
|
|
8
|
+
import sys
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TestLifecycleSearch:
    """Verify that MemoryStoreV2 search honors lifecycle states."""

    def setup_method(self):
        self.db_fd, self.db_path = tempfile.mkstemp(suffix=".db")
        # MemoryStoreV2 installs the full schema plus FTS triggers.
        from memory_store_v2 import MemoryStoreV2
        self.store = MemoryStoreV2(self.db_path)

        # Seed six memories via the real add_memory API (ids 1..6).
        seeds = [
            ("active memory about Python programming", ["python"]),
            ("another active memory about JavaScript", ["js"]),
            ("warm memory about database design", ["db"]),
            ("cold memory about API architecture", ["api"]),
            ("archived memory about legacy systems", ["legacy"]),
            ("tombstoned memory about deleted content", ["deleted"]),
        ]
        for text, tag_list in seeds:
            self.store.add_memory(content=text, tags=tag_list, importance=5)

        # Force ids 3-6 into their non-active lifecycle states directly.
        db = sqlite3.connect(self.db_path)
        for statement in (
            "UPDATE memories SET lifecycle_state = 'warm' WHERE id = 3",
            "UPDATE memories SET lifecycle_state = 'cold' WHERE id = 4",
            "UPDATE memories SET lifecycle_state = 'archived' WHERE id = 5",
            "UPDATE memories SET lifecycle_state = 'tombstoned' WHERE id = 6",
        ):
            db.execute(statement)
        db.commit()
        db.close()

        # Vectors must be rebuilt after the out-of-band state edits.
        self.store._rebuild_vectors()

    def teardown_method(self):
        os.close(self.db_fd)
        try:
            os.unlink(self.db_path)
        except OSError:
            pass

    def test_default_search_returns_active_and_warm(self):
        """Default search surfaces only ACTIVE and WARM memories."""
        hits = self.store.search("memory", limit=10)
        seen_states = {hit.get('lifecycle_state', 'active') for hit in hits}
        for hidden in ('cold', 'archived', 'tombstoned'):
            assert hidden not in seen_states

    def test_default_search_includes_warm(self):
        """Warm memories are included without any extra parameter."""
        hits = self.store.search("database design", limit=10)
        assert 3 in {hit['id'] for hit in hits}  # id 3 is the warm DB memory

    def test_include_cold(self):
        """Explicitly requesting cold widens the result set."""
        hits = self.store.search(
            "memory", limit=10, lifecycle_states=("active", "warm", "cold"))
        assert 4 in {hit['id'] for hit in hits}

    def test_include_archived(self):
        """Explicitly requesting archived surfaces those memories."""
        hits = self.store.search(
            "legacy", limit=10,
            lifecycle_states=("active", "warm", "cold", "archived"))
        assert 5 in {hit['id'] for hit in hits}

    def test_tombstoned_never_returned(self):
        """Requesting tombstoned must not break the search, whatever the
        implementation decides about surfacing those rows."""
        hits = self.store.search(
            "deleted", limit=10,
            lifecycle_states=("active", "warm", "cold", "archived", "tombstoned"))
        assert isinstance(hits, list)

    def test_backward_compat_no_lifecycle_param(self):
        """Pre-lifecycle call signature still works."""
        hits = self.store.search("Python programming", limit=5)
        assert len(hits) >= 1
        assert hits[0]['content'] is not None

    def test_warm_memory_reactivated_on_recall(self):
        """Recalling a warm memory promotes it back to ACTIVE."""
        hits = self.store.search("database design", limit=5)
        if any(hit['id'] == 3 for hit in hits):
            db = sqlite3.connect(self.db_path)
            row = db.execute(
                "SELECT lifecycle_state FROM memories WHERE id=3").fetchone()
            db.close()
            assert row[0] == "active"

    def test_search_result_includes_lifecycle_state(self):
        """Each search hit carries its lifecycle_state field."""
        hits = self.store.search("Python", limit=5)
        if hits:
            assert 'lifecycle_state' in hits[0]