superlocalmemory 2.7.6 → 2.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +120 -155
- package/README.md +115 -89
- package/api_server.py +2 -12
- package/docs/PATTERN-LEARNING.md +64 -199
- package/docs/example_graph_usage.py +4 -6
- package/install.sh +59 -0
- package/mcp_server.py +83 -7
- package/package.json +1 -8
- package/scripts/generate-thumbnails.py +3 -5
- package/skills/slm-build-graph/SKILL.md +1 -1
- package/skills/slm-list-recent/SKILL.md +1 -1
- package/skills/slm-recall/SKILL.md +1 -1
- package/skills/slm-remember/SKILL.md +1 -1
- package/skills/slm-show-patterns/SKILL.md +1 -1
- package/skills/slm-status/SKILL.md +1 -1
- package/skills/slm-switch-profile/SKILL.md +1 -1
- package/src/agent_registry.py +7 -18
- package/src/auth_middleware.py +3 -5
- package/src/auto_backup.py +3 -7
- package/src/behavioral/__init__.py +49 -0
- package/src/behavioral/behavioral_listener.py +203 -0
- package/src/behavioral/behavioral_patterns.py +275 -0
- package/src/behavioral/cross_project_transfer.py +206 -0
- package/src/behavioral/outcome_inference.py +194 -0
- package/src/behavioral/outcome_tracker.py +193 -0
- package/src/behavioral/tests/__init__.py +4 -0
- package/src/behavioral/tests/test_behavioral_integration.py +108 -0
- package/src/behavioral/tests/test_behavioral_patterns.py +150 -0
- package/src/behavioral/tests/test_cross_project_transfer.py +142 -0
- package/src/behavioral/tests/test_mcp_behavioral.py +139 -0
- package/src/behavioral/tests/test_mcp_report_outcome.py +117 -0
- package/src/behavioral/tests/test_outcome_inference.py +107 -0
- package/src/behavioral/tests/test_outcome_tracker.py +96 -0
- package/src/cache_manager.py +4 -6
- package/src/compliance/__init__.py +48 -0
- package/src/compliance/abac_engine.py +149 -0
- package/src/compliance/abac_middleware.py +116 -0
- package/src/compliance/audit_db.py +215 -0
- package/src/compliance/audit_logger.py +148 -0
- package/src/compliance/retention_manager.py +289 -0
- package/src/compliance/retention_scheduler.py +186 -0
- package/src/compliance/tests/__init__.py +4 -0
- package/src/compliance/tests/test_abac_enforcement.py +95 -0
- package/src/compliance/tests/test_abac_engine.py +124 -0
- package/src/compliance/tests/test_abac_mcp_integration.py +118 -0
- package/src/compliance/tests/test_audit_db.py +123 -0
- package/src/compliance/tests/test_audit_logger.py +98 -0
- package/src/compliance/tests/test_mcp_audit.py +128 -0
- package/src/compliance/tests/test_mcp_retention_policy.py +125 -0
- package/src/compliance/tests/test_retention_manager.py +131 -0
- package/src/compliance/tests/test_retention_scheduler.py +99 -0
- package/src/db_connection_manager.py +2 -12
- package/src/embedding_engine.py +61 -669
- package/src/embeddings/__init__.py +47 -0
- package/src/embeddings/cache.py +70 -0
- package/src/embeddings/cli.py +113 -0
- package/src/embeddings/constants.py +47 -0
- package/src/embeddings/database.py +91 -0
- package/src/embeddings/engine.py +247 -0
- package/src/embeddings/model_loader.py +145 -0
- package/src/event_bus.py +3 -13
- package/src/graph/__init__.py +36 -0
- package/src/graph/build_helpers.py +74 -0
- package/src/graph/cli.py +87 -0
- package/src/graph/cluster_builder.py +188 -0
- package/src/graph/cluster_summary.py +148 -0
- package/src/graph/constants.py +47 -0
- package/src/graph/edge_builder.py +162 -0
- package/src/graph/entity_extractor.py +95 -0
- package/src/graph/graph_core.py +226 -0
- package/src/graph/graph_search.py +231 -0
- package/src/graph/hierarchical.py +207 -0
- package/src/graph/schema.py +99 -0
- package/src/graph_engine.py +45 -1451
- package/src/hnsw_index.py +3 -7
- package/src/hybrid_search.py +36 -683
- package/src/learning/__init__.py +27 -12
- package/src/learning/adaptive_ranker.py +50 -12
- package/src/learning/cross_project_aggregator.py +2 -12
- package/src/learning/engagement_tracker.py +2 -12
- package/src/learning/feature_extractor.py +175 -43
- package/src/learning/feedback_collector.py +7 -12
- package/src/learning/learning_db.py +180 -12
- package/src/learning/project_context_manager.py +2 -12
- package/src/learning/source_quality_scorer.py +2 -12
- package/src/learning/synthetic_bootstrap.py +2 -12
- package/src/learning/tests/__init__.py +2 -0
- package/src/learning/tests/test_adaptive_ranker.py +2 -6
- package/src/learning/tests/test_adaptive_ranker_v28.py +60 -0
- package/src/learning/tests/test_aggregator.py +2 -6
- package/src/learning/tests/test_auto_retrain_v28.py +35 -0
- package/src/learning/tests/test_e2e_ranking_v28.py +82 -0
- package/src/learning/tests/test_feature_extractor_v28.py +93 -0
- package/src/learning/tests/test_feedback_collector.py +2 -6
- package/src/learning/tests/test_learning_db.py +2 -6
- package/src/learning/tests/test_learning_db_v28.py +110 -0
- package/src/learning/tests/test_learning_init_v28.py +48 -0
- package/src/learning/tests/test_outcome_signals.py +48 -0
- package/src/learning/tests/test_project_context.py +2 -6
- package/src/learning/tests/test_schema_migration.py +319 -0
- package/src/learning/tests/test_signal_inference.py +11 -13
- package/src/learning/tests/test_source_quality.py +2 -6
- package/src/learning/tests/test_synthetic_bootstrap.py +3 -7
- package/src/learning/tests/test_workflow_miner.py +2 -6
- package/src/learning/workflow_pattern_miner.py +2 -12
- package/src/lifecycle/__init__.py +54 -0
- package/src/lifecycle/bounded_growth.py +239 -0
- package/src/lifecycle/compaction_engine.py +226 -0
- package/src/lifecycle/lifecycle_engine.py +302 -0
- package/src/lifecycle/lifecycle_evaluator.py +225 -0
- package/src/lifecycle/lifecycle_scheduler.py +130 -0
- package/src/lifecycle/retention_policy.py +285 -0
- package/src/lifecycle/tests/__init__.py +4 -0
- package/src/lifecycle/tests/test_bounded_growth.py +193 -0
- package/src/lifecycle/tests/test_compaction.py +179 -0
- package/src/lifecycle/tests/test_lifecycle_engine.py +137 -0
- package/src/lifecycle/tests/test_lifecycle_evaluation.py +177 -0
- package/src/lifecycle/tests/test_lifecycle_scheduler.py +127 -0
- package/src/lifecycle/tests/test_lifecycle_search.py +109 -0
- package/src/lifecycle/tests/test_mcp_compact.py +149 -0
- package/src/lifecycle/tests/test_mcp_lifecycle_status.py +114 -0
- package/src/lifecycle/tests/test_retention_policy.py +162 -0
- package/src/mcp_tools_v28.py +280 -0
- package/src/memory-profiles.py +2 -12
- package/src/memory-reset.py +2 -12
- package/src/memory_compression.py +2 -12
- package/src/memory_store_v2.py +76 -20
- package/src/migrate_v1_to_v2.py +2 -12
- package/src/pattern_learner.py +29 -975
- package/src/patterns/__init__.py +24 -0
- package/src/patterns/analyzers.py +247 -0
- package/src/patterns/learner.py +267 -0
- package/src/patterns/scoring.py +167 -0
- package/src/patterns/store.py +223 -0
- package/src/patterns/terminology.py +138 -0
- package/src/provenance_tracker.py +4 -14
- package/src/query_optimizer.py +4 -6
- package/src/rate_limiter.py +2 -6
- package/src/search/__init__.py +20 -0
- package/src/search/cli.py +77 -0
- package/src/search/constants.py +26 -0
- package/src/search/engine.py +239 -0
- package/src/search/fusion.py +122 -0
- package/src/search/index_loader.py +112 -0
- package/src/search/methods.py +162 -0
- package/src/search_engine_v2.py +4 -6
- package/src/setup_validator.py +7 -13
- package/src/subscription_manager.py +2 -12
- package/src/tree/__init__.py +59 -0
- package/src/tree/builder.py +183 -0
- package/src/tree/nodes.py +196 -0
- package/src/tree/queries.py +252 -0
- package/src/tree/schema.py +76 -0
- package/src/tree_manager.py +10 -711
- package/src/trust/__init__.py +45 -0
- package/src/trust/constants.py +66 -0
- package/src/trust/queries.py +157 -0
- package/src/trust/schema.py +95 -0
- package/src/trust/scorer.py +299 -0
- package/src/trust/signals.py +95 -0
- package/src/trust_scorer.py +39 -697
- package/src/webhook_dispatcher.py +2 -12
- package/ui/app.js +1 -1
- package/ui/js/agents.js +1 -1
- package/ui_server.py +2 -14
- package/ATTRIBUTION.md +0 -140
- package/docs/ARCHITECTURE-V2.5.md +0 -190
- package/docs/GRAPH-ENGINE.md +0 -503
- package/docs/architecture-diagram.drawio +0 -405
- package/docs/plans/2026-02-13-benchmark-suite.md +0 -1349
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for v2.8 learning.db schema extensions — outcome and behavioral tables.
|
|
4
|
+
"""
|
|
5
|
+
import pytest
|
|
6
|
+
import json
|
|
7
|
+
import sys
|
|
8
|
+
import os
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@pytest.fixture(autouse=True)
def reset_singleton():
    """Reset the LearningDB singleton around every test.

    autouse=True so every test in this module starts with a fresh
    instance; the reset after ``yield`` keeps singleton state from
    leaking into later test modules.
    """
    from learning.learning_db import LearningDB
    LearningDB.reset_instance()
    yield
    LearningDB.reset_instance()
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@pytest.fixture
def learning_db(tmp_path):
    """Return a LearningDB backed by a throwaway database under tmp_path."""
    from learning.learning_db import LearningDB
    db_path = tmp_path / "learning.db"
    return LearningDB(db_path=db_path)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class TestActionOutcomesTable:
    """Schema and CRUD checks for the v2.8 ``action_outcomes`` table."""

    def test_table_exists(self, learning_db):
        # The table must be created by LearningDB's schema initialization.
        conn = learning_db._get_connection()
        rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        conn.close()
        table_names = {row[0] for row in rows}
        assert "action_outcomes" in table_names

    def test_store_outcome(self, learning_db):
        # store_outcome returns the integer rowid of the new record.
        outcome_id = learning_db.store_outcome([1, 2], "success", action_type="code_written", project="myproject")
        assert isinstance(outcome_id, int)
        assert outcome_id > 0

    def test_get_outcomes(self, learning_db):
        # Both outcomes were stored under proj1, so both should come back.
        learning_db.store_outcome([1], "success", project="proj1")
        learning_db.store_outcome([2], "failure", project="proj1")
        fetched = learning_db.get_outcomes(project="proj1")
        assert len(fetched) == 2

    def test_get_outcomes_by_memory_id(self, learning_db):
        # Filtering by memory_id=1 matches only the first stored outcome.
        learning_db.store_outcome([1, 2], "success")
        learning_db.store_outcome([3], "failure")
        fetched = learning_db.get_outcomes(memory_id=1)
        assert len(fetched) == 1
        assert 1 in fetched[0]["memory_ids"]

    def test_outcome_has_profile(self, learning_db):
        # Outcomes fall back to the 'default' profile when none is given.
        learning_db.store_outcome([1], "success")
        fetched = learning_db.get_outcomes()
        assert fetched[0]["profile"] == "default"
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class TestBehavioralPatternsTable:
    """Schema and query checks for the v2.8 ``behavioral_patterns`` table."""

    def test_table_exists(self, learning_db):
        conn = learning_db._get_connection()
        rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        conn.close()
        assert "behavioral_patterns" in {row[0] for row in rows}

    def test_store_pattern(self, learning_db):
        # store_behavioral_pattern returns the integer rowid of the new record.
        pattern_id = learning_db.store_behavioral_pattern("tag_success", "python", success_rate=0.85, evidence_count=10, confidence=0.8)
        assert isinstance(pattern_id, int)

    def test_get_patterns(self, learning_db):
        # Both patterns share the pattern_type, so both should be returned.
        learning_db.store_behavioral_pattern("tag_success", "python", confidence=0.8)
        learning_db.store_behavioral_pattern("tag_success", "javascript", confidence=0.6)
        fetched = learning_db.get_behavioral_patterns(pattern_type="tag_success")
        assert len(fetched) == 2

    def test_get_patterns_min_confidence(self, learning_db):
        # Only the python pattern (0.8) clears the 0.5 confidence floor.
        learning_db.store_behavioral_pattern("tag_success", "python", confidence=0.8)
        learning_db.store_behavioral_pattern("tag_success", "javascript", confidence=0.3)
        fetched = learning_db.get_behavioral_patterns(min_confidence=0.5)
        assert len(fetched) == 1
        assert fetched[0]["pattern_key"] == "python"
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
class TestCrossProjectTable:
    """Checks for the v2.8 ``cross_project_behaviors`` transfer table."""

    def test_table_exists(self, learning_db):
        conn = learning_db._get_connection()
        rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        conn.close()
        assert "cross_project_behaviors" in {row[0] for row in rows}

    def test_store_transfer(self, learning_db):
        # A transfer record references an existing behavioral pattern id.
        pattern_id = learning_db.store_behavioral_pattern("tag_success", "python", confidence=0.8)
        transfer_id = learning_db.store_cross_project("project_a", "project_b", pattern_id, confidence=0.7)
        assert isinstance(transfer_id, int)

    def test_get_transfers(self, learning_db):
        pattern_id = learning_db.store_behavioral_pattern("tag_success", "python", confidence=0.8)
        learning_db.store_cross_project("proj_a", "proj_b", pattern_id, confidence=0.7)
        fetched = learning_db.get_cross_project_transfers(source_project="proj_a")
        assert len(fetched) == 1
        assert fetched[0]["target_project"] == "proj_b"

    def test_existing_tables_untouched(self, learning_db):
        """Existing 6 tables should still exist."""
        conn = learning_db._get_connection()
        table_names = {row[0] for row in conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()}
        conn.close()
        for expected in ("transferable_patterns", "workflow_patterns", "ranking_feedback", "ranking_models", "source_quality", "engagement_metrics"):
            assert expected in table_names
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for unified learning status with v2.8 engines.
|
|
4
|
+
"""
|
|
5
|
+
import sys
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class TestUnifiedLearningStatus:
    """The unified learning status must report the v2.8 engine sections."""

    @staticmethod
    def _status():
        # Import lazily inside each call so every test exercises the public
        # entry point fresh, mirroring how callers use it.
        from learning import get_status
        return get_status()

    def test_get_status_has_v28_engines(self):
        """Status should include lifecycle, behavioral, compliance info."""
        assert "v28_engines" in self._status()

    def test_v28_engines_structure(self):
        engines = self._status()["v28_engines"]
        assert "lifecycle" in engines
        assert "behavioral" in engines
        assert "compliance" in engines

    def test_lifecycle_status_included(self):
        assert "available" in self._status()["v28_engines"]["lifecycle"]

    def test_behavioral_status_included(self):
        assert "available" in self._status()["v28_engines"]["behavioral"]

    def test_compliance_status_included(self):
        assert "available" in self._status()["v28_engines"]["compliance"]

    def test_graceful_when_engines_unavailable(self):
        """Status should not crash even if engine imports fail."""
        status = self._status()
        # Should always return a dict with v28_engines
        assert isinstance(status["v28_engines"], dict)
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
# SPDX-License-Identifier: MIT
|
|
2
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
3
|
+
"""Tests for v2.8 outcome signal types in feedback collector.
|
|
4
|
+
"""
|
|
5
|
+
import pytest
|
|
6
|
+
import sys
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
sys.path.insert(0, str(Path(__file__).resolve().parent.parent.parent))
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TestOutcomeSignalTypes:
    """Verify outcome signal types are registered and have correct values."""

    @staticmethod
    def _signal_values():
        # Shared accessor for the collector's signal-value table.
        from learning.feedback_collector import FeedbackCollector
        return FeedbackCollector.SIGNAL_VALUES

    def test_outcome_success_registered(self):
        values = self._signal_values()
        assert "outcome_success" in values
        assert values["outcome_success"] == 1.0

    def test_outcome_partial_registered(self):
        values = self._signal_values()
        assert "outcome_partial" in values
        assert values["outcome_partial"] == 0.5

    def test_outcome_failure_registered(self):
        values = self._signal_values()
        assert "outcome_failure" in values
        assert values["outcome_failure"] == 0.0

    def test_outcome_retry_registered(self):
        values = self._signal_values()
        assert "outcome_retry" in values
        assert values["outcome_retry"] == 0.2

    def test_existing_signals_unchanged(self):
        """All 17 original signal types still present with correct values."""
        values = self._signal_values()
        assert values["mcp_used_high"] == 1.0
        assert values["dashboard_thumbs_up"] == 1.0
        assert values["implicit_positive_timegap"] == 0.6
        assert values["passive_decay"] == 0.0
        assert len(values) == 21  # 17 original + 4 new

    def test_total_signal_count(self):
        """Should have exactly 21 signal types (17 + 4)."""
        assert len(self._signal_values()) == 21
|
|
@@ -1,10 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
Copyright (c) 2026 Varun Pratap Bhardwaj
|
|
5
|
-
Licensed under MIT License
|
|
6
|
-
"""
|
|
7
|
-
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
8
4
|
import sqlite3
|
|
9
5
|
from pathlib import Path
|
|
10
6
|
|
|
@@ -0,0 +1,319 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
4
|
+
"""SuperLocalMemory V2 - Tests for v2.8 Schema Migration
|
|
5
|
+
|
|
6
|
+
Tests that the v2.8.0 lifecycle + access control columns are added
|
|
7
|
+
via backward-compatible ALTER TABLE migration in memory_store_v2.py.
|
|
8
|
+
"""
|
|
9
|
+
import json
|
|
10
|
+
import sqlite3
|
|
11
|
+
import sys
|
|
12
|
+
import importlib
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
|
|
15
|
+
import pytest
|
|
16
|
+
|
|
17
|
+
# Ensure src/ is importable AND takes precedence over ~/.claude-memory/
|
|
18
|
+
# (the installed production copy). Other test modules in this suite may
|
|
19
|
+
# cause ~/.claude-memory to appear on sys.path earlier, so we must ensure
|
|
20
|
+
# our development src/ directory wins.
|
|
21
|
+
SRC_DIR = Path(__file__).resolve().parent.parent.parent # src/
|
|
22
|
+
_src_str = str(SRC_DIR)
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def _import_memory_store_v2():
    """
    Import MemoryStoreV2 from the development src/ directory.

    When running in a full test suite, other test modules may cause
    ~/.claude-memory/ (the installed production copy) to appear on sys.path
    before src/. This helper ensures we always load from the correct location
    by prioritizing src/ and invalidating any stale cached import.

    Returns:
        The MemoryStoreV2 class loaded from src/.
    """
    # Ensure src/ is at position 0 so it shadows any installed copy.
    if sys.path[0] != _src_str:
        if _src_str in sys.path:
            sys.path.remove(_src_str)
        sys.path.insert(0, _src_str)

    # If memory_store_v2 was already imported from a different location
    # (e.g., ~/.claude-memory/), drop it from the module cache to force a
    # reimport from src/. memory_store_v2 is a single module, not a package,
    # so removing the one cache entry is sufficient. (A previous version
    # also looped over sys.modules looking for submodules; that loop's body
    # was only `pass`, i.e. dead code, and has been removed.)
    # getattr guards against modules whose __file__ is None (namespace or
    # frozen modules), which would crash Path() despite passing hasattr.
    mod = sys.modules.get("memory_store_v2")
    if mod is not None and getattr(mod, "__file__", None):
        mod_path = str(Path(mod.__file__).resolve().parent)
        if mod_path != _src_str:
            del sys.modules["memory_store_v2"]

    from memory_store_v2 import MemoryStoreV2
    return MemoryStoreV2
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# ---------------------------------------------------------------------------
|
|
58
|
+
# Helper — create a v2.7.6 schema database (WITHOUT lifecycle columns)
|
|
59
|
+
# ---------------------------------------------------------------------------
|
|
60
|
+
|
|
61
|
+
def _create_v276_database(db_path: Path) -> None:
    """
    Create a minimal memories table matching the v2.7.6 schema.

    This deliberately omits the v2.8 lifecycle columns so the migration
    can be verified.
    """
    schema_sql = """
        CREATE TABLE IF NOT EXISTS memories (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            content TEXT NOT NULL,
            summary TEXT,
            project_path TEXT,
            project_name TEXT,
            tags TEXT,
            category TEXT,
            parent_id INTEGER,
            tree_path TEXT,
            depth INTEGER DEFAULT 0,
            memory_type TEXT DEFAULT 'session',
            importance INTEGER DEFAULT 5,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            last_accessed TIMESTAMP,
            access_count INTEGER DEFAULT 0,
            content_hash TEXT UNIQUE,
            cluster_id INTEGER,
            profile TEXT DEFAULT 'default',
            FOREIGN KEY (parent_id) REFERENCES memories(id) ON DELETE CASCADE
        )
    """
    db = sqlite3.connect(str(db_path))
    db.execute("PRAGMA journal_mode=WAL")
    db.execute(schema_sql)
    db.commit()
    db.close()
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _insert_test_memory(db_path: Path, content: str = "test memory content") -> int:
    """Insert a single test memory into a v2.7.6 database and return its id."""
    db = sqlite3.connect(str(db_path))
    # Connection.execute creates an implicit cursor; lastrowid is read from it.
    cur = db.execute(
        "INSERT INTO memories (content, profile) VALUES (?, ?)",
        (content, "default"),
    )
    db.commit()
    new_id = cur.lastrowid
    db.close()
    return new_id
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def _get_column_names(db_path: Path, table: str = "memories") -> set:
    """Return the set of column names for a table."""
    db = sqlite3.connect(str(db_path))
    # PRAGMA table_info rows are (cid, name, type, notnull, dflt, pk);
    # index 1 is the column name. Iterate the cursor directly.
    names = {info[1] for info in db.execute(f"PRAGMA table_info({table})")}
    db.close()
    return names
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def _get_index_names(db_path: Path) -> set:
    """Return the set of index names in the database."""
    db = sqlite3.connect(str(db_path))
    names = {row[0] for row in db.execute("SELECT name FROM sqlite_master WHERE type='index'")}
    db.close()
    return names
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
# ---------------------------------------------------------------------------
|
|
132
|
+
# Fixtures
|
|
133
|
+
# ---------------------------------------------------------------------------
|
|
134
|
+
|
|
135
|
+
@pytest.fixture
def v276_db(tmp_path):
    """
    Create a temporary v2.7.6 schema database with one pre-existing memory.
    Returns (db_path, memory_id).
    """
    db_path = tmp_path / "memory.db"
    _create_v276_database(db_path)
    # Seed one row so migration tests can verify data survival and the
    # DEFAULT values applied to pre-existing records.
    mem_id = _insert_test_memory(db_path, "pre-existing memory from v2.7.6")
    return db_path, mem_id
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
@pytest.fixture
def migrated_store(v276_db):
    """
    Initialize MemoryStoreV2 on the v2.7.6 database, triggering the migration.
    Returns (store, db_path, pre_existing_memory_id).
    """
    db_path, mem_id = v276_db
    # Import via the helper so the development src/ copy wins over any
    # previously cached install location.
    MemoryStoreV2 = _import_memory_store_v2()
    store = MemoryStoreV2(db_path=db_path)
    return store, db_path, mem_id
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
# ---------------------------------------------------------------------------
|
|
160
|
+
# Tests
|
|
161
|
+
# ---------------------------------------------------------------------------
|
|
162
|
+
|
|
163
|
+
class TestV28SchemaMigration:
    """Verify v2.8.0 lifecycle + access control migration."""

    @staticmethod
    def _read_column(db_path, column, mem_id):
        # Read one column of one memory row via a raw connection, bypassing
        # the store API so the on-disk values are checked directly.
        conn = sqlite3.connect(str(db_path))
        cur = conn.cursor()
        cur.execute(f"SELECT {column} FROM memories WHERE id = ?", (mem_id,))
        record = cur.fetchone()
        conn.close()
        return record

    def test_lifecycle_state_column_added(self, migrated_store):
        """After MemoryStoreV2 init, lifecycle_state column exists."""
        _store, db_path, _mem_id = migrated_store
        columns = _get_column_names(db_path)
        assert "lifecycle_state" in columns, (
            f"lifecycle_state column missing. Columns: {sorted(columns)}"
        )

    def test_existing_memories_get_active_state(self, migrated_store):
        """Pre-existing memories get lifecycle_state='active' from DEFAULT."""
        _store, db_path, mem_id = migrated_store
        record = self._read_column(db_path, "lifecycle_state", mem_id)
        assert record is not None, "Pre-existing memory not found after migration"
        assert record[0] == "active", (
            f"Expected lifecycle_state='active', got '{record[0]}'"
        )

    def test_access_level_column_added(self, migrated_store):
        """access_level column exists with DEFAULT 'public'."""
        _store, db_path, mem_id = migrated_store
        columns = _get_column_names(db_path)
        assert "access_level" in columns, (
            f"access_level column missing. Columns: {sorted(columns)}"
        )
        # Verify the DEFAULT was applied to the pre-existing row.
        record = self._read_column(db_path, "access_level", mem_id)
        assert record is not None
        assert record[0] == "public", (
            f"Expected access_level='public', got '{record[0]}'"
        )

    def test_lifecycle_history_column_added(self, migrated_store):
        """lifecycle_history column exists with DEFAULT '[]'."""
        _store, db_path, mem_id = migrated_store
        columns = _get_column_names(db_path)
        assert "lifecycle_history" in columns, (
            f"lifecycle_history column missing. Columns: {sorted(columns)}"
        )
        record = self._read_column(db_path, "lifecycle_history", mem_id)
        assert record is not None
        assert record[0] == "[]", (
            f"Expected lifecycle_history='[]', got '{record[0]}'"
        )
        # The default must also parse as valid (empty) JSON.
        assert json.loads(record[0]) == []

    def test_lifecycle_updated_at_column_added(self, migrated_store):
        """lifecycle_updated_at column exists (nullable, no default)."""
        _store, db_path, mem_id = migrated_store
        columns = _get_column_names(db_path)
        assert "lifecycle_updated_at" in columns, (
            f"lifecycle_updated_at column missing. Columns: {sorted(columns)}"
        )
        # Pre-existing rows carry NULL since the column has no default.
        record = self._read_column(db_path, "lifecycle_updated_at", mem_id)
        assert record is not None
        assert record[0] is None, (
            f"Expected lifecycle_updated_at=NULL for pre-existing row, got '{record[0]}'"
        )

    def test_migration_is_idempotent(self, v276_db):
        """Running migration twice (two MemoryStoreV2 inits) doesn't error."""
        db_path, mem_id = v276_db
        MemoryStoreV2 = _import_memory_store_v2()

        first = MemoryStoreV2(db_path=db_path)   # first init — migration runs
        second = MemoryStoreV2(db_path=db_path)  # second init — repeated ALTER TABLE must be caught

        # Both inits succeeded; all v2.8 columns present exactly once.
        columns = _get_column_names(db_path)
        for expected in ("lifecycle_state", "access_level", "lifecycle_history", "lifecycle_updated_at"):
            assert expected in columns

        # Pre-existing data survived both migration passes.
        record = self._read_column(db_path, "content", mem_id)
        assert record is not None
        assert record[0] == "pre-existing memory from v2.7.6"

    def test_existing_queries_still_work(self, migrated_store):
        """list_all() and search() work after migration."""
        store, db_path, mem_id = migrated_store

        listed = store.list_all(limit=10)
        assert len(listed) >= 1, "list_all() returned no results after migration"
        assert any(entry["id"] == mem_id for entry in listed), (
            f"Pre-existing memory id={mem_id} not found in list_all() results"
        )

        # Add a memory through the store API (so FTS is properly populated)
        new_id = store.add_memory(
            content="post-migration memory for search test",
            tags=["test"],
        )
        assert new_id is not None, "add_memory() should succeed after migration"

        # search should not crash (may return empty if TF-IDF vectors not rebuilt)
        hits = store.search("post-migration", limit=5)
        assert isinstance(hits, list), "search() should return a list"

    def test_v28_indexes_created(self, migrated_store):
        """v2.8.0 indexes for lifecycle_state and access_level exist."""
        _store, db_path, _mem_id = migrated_store
        indexes = _get_index_names(db_path)
        assert "idx_lifecycle_state" in indexes, (
            f"idx_lifecycle_state index missing. Indexes: {sorted(indexes)}"
        )
        assert "idx_access_level" in indexes, (
            f"idx_access_level index missing. Indexes: {sorted(indexes)}"
        )
|
|
@@ -1,12 +1,10 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
Licensed under MIT License
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
4
|
+
"""SuperLocalMemory V2 - Signal Inference Engine Tests (v2.7.4)
|
|
6
5
|
|
|
7
6
|
Tests for the implicit feedback signal inference system.
|
|
8
7
|
"""
|
|
9
|
-
|
|
10
8
|
import time
|
|
11
9
|
import threading
|
|
12
10
|
import pytest
|
|
@@ -341,14 +339,14 @@ class TestFeedbackCollectorImplicit:
|
|
|
341
339
|
|
|
342
340
|
|
|
343
341
|
class TestFeatureExpansion:
|
|
344
|
-
"""Test the 10→12 feature vector expansion."""
|
|
342
|
+
"""Test the 10→12→20 feature vector expansion."""
|
|
345
343
|
|
|
346
|
-
def
|
|
347
|
-
"""Feature vector should have
|
|
344
|
+
def test_feature_count_is_20(self):
|
|
345
|
+
"""Feature vector should have 20 dimensions (v2.8)."""
|
|
348
346
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
349
347
|
from feature_extractor import FEATURE_NAMES, NUM_FEATURES
|
|
350
|
-
assert NUM_FEATURES ==
|
|
351
|
-
assert len(FEATURE_NAMES) ==
|
|
348
|
+
assert NUM_FEATURES == 20
|
|
349
|
+
assert len(FEATURE_NAMES) == 20
|
|
352
350
|
|
|
353
351
|
def test_new_features_present(self):
|
|
354
352
|
"""signal_count and avg_signal_value should be in feature names."""
|
|
@@ -359,8 +357,8 @@ class TestFeatureExpansion:
|
|
|
359
357
|
assert FEATURE_NAMES.index('signal_count') == 10
|
|
360
358
|
assert FEATURE_NAMES.index('avg_signal_value') == 11
|
|
361
359
|
|
|
362
|
-
def
|
|
363
|
-
"""Extract should return
|
|
360
|
+
def test_extract_features_returns_20(self):
|
|
361
|
+
"""Extract should return 20-element vector (v2.8)."""
|
|
364
362
|
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
365
363
|
from feature_extractor import FeatureExtractor
|
|
366
364
|
fe = FeatureExtractor()
|
|
@@ -368,7 +366,7 @@ class TestFeatureExpansion:
|
|
|
368
366
|
{'id': 1, 'content': 'test', 'importance': 5},
|
|
369
367
|
'test'
|
|
370
368
|
)
|
|
371
|
-
assert len(features) ==
|
|
369
|
+
assert len(features) == 20
|
|
372
370
|
|
|
373
371
|
def test_signal_features_with_stats(self):
|
|
374
372
|
"""Signal features should use provided stats."""
|
|
@@ -1,10 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
Copyright (c) 2026 Varun Pratap Bhardwaj
|
|
5
|
-
Licensed under MIT License
|
|
6
|
-
"""
|
|
7
|
-
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
8
4
|
import sqlite3
|
|
9
5
|
from pathlib import Path
|
|
10
6
|
|
|
@@ -1,10 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
Copyright (c) 2026 Varun Pratap Bhardwaj
|
|
5
|
-
Licensed under MIT License
|
|
6
|
-
"""
|
|
7
|
-
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
8
4
|
import sqlite3
|
|
9
5
|
from pathlib import Path
|
|
10
6
|
|
|
@@ -258,7 +254,7 @@ class TestGenerateSyntheticData:
|
|
|
258
254
|
assert "label" in r
|
|
259
255
|
assert "source" in r
|
|
260
256
|
assert "features" in r
|
|
261
|
-
assert len(r["features"]) ==
|
|
257
|
+
assert len(r["features"]) == 20 # 20-dimensional feature vector (v2.8)
|
|
262
258
|
|
|
263
259
|
def test_labels_in_range(self, bootstrapper_with_data):
|
|
264
260
|
records = bootstrapper_with_data.generate_synthetic_training_data()
|
|
@@ -1,10 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
Copyright (c) 2026 Varun Pratap Bhardwaj
|
|
5
|
-
Licensed under MIT License
|
|
6
|
-
"""
|
|
7
|
-
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
8
4
|
import pytest
|
|
9
5
|
|
|
10
6
|
|
|
@@ -1,16 +1,6 @@
|
|
|
1
1
|
#!/usr/bin/env python3
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
Copyright (c) 2026 Varun Pratap Bhardwaj
|
|
5
|
-
Licensed under MIT License
|
|
6
|
-
|
|
7
|
-
Repository: https://github.com/varun369/SuperLocalMemoryV2
|
|
8
|
-
Author: Varun Pratap Bhardwaj (Solution Architect)
|
|
9
|
-
|
|
10
|
-
NOTICE: This software is protected by MIT License.
|
|
11
|
-
Attribution must be preserved in all copies or derivatives.
|
|
12
|
-
"""
|
|
13
|
-
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
14
4
|
"""
|
|
15
5
|
WorkflowPatternMiner -- Layer 3: Sliding-window sequence and temporal pattern mining.
|
|
16
6
|
|