superlocalmemory 2.8.2 → 2.8.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/ATTRIBUTION.md +1 -1
  2. package/CHANGELOG.md +17 -0
  3. package/README.md +7 -5
  4. package/api_server.py +5 -0
  5. package/bin/slm +35 -0
  6. package/bin/slm.bat +3 -3
  7. package/docs/SECURITY-QUICK-REFERENCE.md +214 -0
  8. package/install.ps1 +11 -11
  9. package/mcp_server.py +78 -10
  10. package/package.json +2 -2
  11. package/requirements-core.txt +16 -18
  12. package/requirements-learning.txt +8 -8
  13. package/requirements.txt +9 -7
  14. package/scripts/prepack.js +33 -0
  15. package/scripts/verify-v27.ps1 +301 -0
  16. package/src/agent_registry.py +32 -28
  17. package/src/auto_backup.py +12 -6
  18. package/src/cache_manager.py +2 -2
  19. package/src/compression/__init__.py +25 -0
  20. package/src/compression/cli.py +150 -0
  21. package/src/compression/cold_storage.py +217 -0
  22. package/src/compression/config.py +72 -0
  23. package/src/compression/orchestrator.py +133 -0
  24. package/src/compression/tier2_compressor.py +228 -0
  25. package/src/compression/tier3_compressor.py +153 -0
  26. package/src/compression/tier_classifier.py +148 -0
  27. package/src/db_connection_manager.py +5 -5
  28. package/src/event_bus.py +24 -22
  29. package/src/hnsw_index.py +3 -3
  30. package/src/learning/__init__.py +5 -4
  31. package/src/learning/adaptive_ranker.py +14 -265
  32. package/src/learning/bootstrap/__init__.py +69 -0
  33. package/src/learning/bootstrap/constants.py +93 -0
  34. package/src/learning/bootstrap/db_queries.py +316 -0
  35. package/src/learning/bootstrap/sampling.py +82 -0
  36. package/src/learning/bootstrap/text_utils.py +71 -0
  37. package/src/learning/cross_project_aggregator.py +58 -57
  38. package/src/learning/db/__init__.py +40 -0
  39. package/src/learning/db/constants.py +44 -0
  40. package/src/learning/db/schema.py +279 -0
  41. package/src/learning/learning_db.py +15 -234
  42. package/src/learning/ranking/__init__.py +33 -0
  43. package/src/learning/ranking/constants.py +84 -0
  44. package/src/learning/ranking/helpers.py +278 -0
  45. package/src/learning/source_quality_scorer.py +66 -65
  46. package/src/learning/synthetic_bootstrap.py +28 -310
  47. package/src/memory/__init__.py +36 -0
  48. package/src/memory/cli.py +205 -0
  49. package/src/memory/constants.py +39 -0
  50. package/src/memory/helpers.py +28 -0
  51. package/src/memory/schema.py +166 -0
  52. package/src/memory-profiles.py +94 -86
  53. package/src/memory-reset.py +187 -185
  54. package/src/memory_compression.py +2 -2
  55. package/src/memory_store_v2.py +44 -354
  56. package/src/migrate_v1_to_v2.py +11 -10
  57. package/src/patterns/analyzers.py +104 -100
  58. package/src/patterns/learner.py +17 -13
  59. package/src/patterns/scoring.py +25 -21
  60. package/src/patterns/store.py +40 -38
  61. package/src/patterns/terminology.py +53 -51
  62. package/src/provenance_tracker.py +2 -2
  63. package/src/qualixar_attribution.py +1 -1
  64. package/src/search/engine.py +16 -14
  65. package/src/search/index_loader.py +13 -11
  66. package/src/setup_validator.py +160 -158
  67. package/src/subscription_manager.py +20 -18
  68. package/src/tree/builder.py +66 -64
  69. package/src/tree/nodes.py +103 -97
  70. package/src/tree/queries.py +142 -137
  71. package/src/tree/schema.py +46 -42
  72. package/src/webhook_dispatcher.py +3 -3
  73. package/ui_server.py +7 -4
@@ -0,0 +1,153 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ """
5
+ Tier 3 compression logic.
6
+ Compresses memories to bullet points only format.
7
+ """
8
+
9
+ import sqlite3
10
+ import json
11
+ import re
12
+ from datetime import datetime
13
+ from pathlib import Path
14
+ from typing import List
15
+
16
+
17
MEMORY_DIR = Path.home() / ".claude-memory"
DB_PATH = MEMORY_DIR / "memory.db"

# Sentence-boundary splitter, compiled once: compress_all_tier3() may call
# _summarize_to_bullets() for every archived memory.
_SENTENCE_SPLIT = re.compile(r'[.!?]+')


class Tier3Compressor:
    """Compress memories to bullet points only (Tier 3).

    Tier 3 is the most aggressive compression stage: the memory content is
    reduced to at most five short bullet points stored as a JSON document,
    together with provenance metadata (original length, tier-2 archive
    timestamp, and the tier-3 compression timestamp).
    """

    def __init__(self, db_path: Path = DB_PATH):
        # SQLite database containing the `memories` table.
        self.db_path = db_path

    def compress_to_tier3(self, memory_id: int) -> bool:
        """
        Compress a single memory's content to bullet points only.

        The tier classifier is responsible for setting ``tier = 3``; this
        method only rewrites the ``content`` column of memories that are
        already classified as tier 3.

        Args:
            memory_id: ID of the memory to compress.

        Returns:
            True if compression succeeded (or the content was already in the
            tier-3 bullet format); False if the memory does not exist or is
            not classified as tier 3.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            cursor.execute(
                'SELECT content, tier FROM memories WHERE id = ?',
                (memory_id,),
            )
            result = cursor.fetchone()

            if not result:
                return False

            content, current_tier = result

            # Skip memories not classified for tier-3 compression.
            if current_tier != 3:
                return False

            # Try to parse as Tier 2 compressed content (a JSON object with a
            # 'summary' key). Anything else is treated as plain text.
            try:
                compressed_content = json.loads(content)

                # Already in tier-3 format: nothing to do.
                if isinstance(compressed_content, dict) and 'bullets' in compressed_content:
                    return True

                if isinstance(compressed_content, dict) and 'summary' in compressed_content:
                    # `or ''` guards against a JSON null summary, which would
                    # otherwise crash the regex split in _summarize_to_bullets.
                    summary = compressed_content.get('summary') or ''
                    tier2_archived_at = compressed_content.get('compressed_at')
                    original_length = compressed_content.get('original_length', 0)
                else:
                    # Valid JSON but not the tier-2 shape: keep the raw text.
                    summary = content
                    tier2_archived_at = None
                    original_length = len(content)

            except (json.JSONDecodeError, TypeError):
                # Not JSON (or content is NULL): treat as plain text.
                summary = content or ''
                tier2_archived_at = None
                original_length = len(content or '')

            # Condense the summary into at most five bullet points.
            bullet_points = self._summarize_to_bullets(summary)

            ultra_compressed = {
                'bullets': bullet_points,
                'tier2_archived_at': tier2_archived_at,
                'original_length': original_length,
                'compressed_to_tier3_at': datetime.now().isoformat(),
            }

            cursor.execute(
                '''
                UPDATE memories
                SET content = ?, tier = 3, updated_at = CURRENT_TIMESTAMP
                WHERE id = ?
                ''',
                (json.dumps(ultra_compressed), memory_id),
            )

            conn.commit()
        finally:
            conn.close()
        return True

    def _summarize_to_bullets(self, summary: str, max_bullets: int = 5) -> List[str]:
        """
        Convert summary text into a short list of bullet points.

        Args:
            summary: Summary text, split on sentence boundaries (., !, ?).
            max_bullets: Maximum number of bullets to keep.

        Returns:
            Up to ``max_bullets`` sentence fragments (each capped at 80
            characters), or a single placeholder bullet when no usable
            sentence is found.
        """
        bullets: List[str] = []

        for sent in _SENTENCE_SPLIT.split(summary):
            sent = sent.strip()

            # Drop fragments too short to carry meaning.
            if len(sent) < 10:
                continue

            # Cap bullet length at 80 characters (77 chars + ellipsis).
            if len(sent) > 80:
                sent = sent[:77] + '...'

            bullets.append(sent)

            if len(bullets) >= max_bullets:
                break

        return bullets if bullets else ['[No summary available]']

    def compress_all_tier3(self) -> int:
        """Compress the content of every memory currently classified tier 3.

        Memory IDs are collected first and the connection closed, so each
        compress_to_tier3() call manages its own short-lived connection.

        Returns:
            Number of memories successfully compressed.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute('SELECT id FROM memories WHERE tier = 3')
            memory_ids = [row[0] for row in cursor.fetchall()]
        finally:
            conn.close()

        compressed_count = 0
        for memory_id in memory_ids:
            if self.compress_to_tier3(memory_id):
                compressed_count += 1

        return compressed_count
@@ -0,0 +1,148 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ """
5
+ Tier classification logic for memory compression.
6
+ Classifies memories into tiers based on age and access patterns.
7
+ """
8
+
9
+ import sqlite3
10
+ from datetime import datetime
11
+ from pathlib import Path
12
+ from typing import List, Dict, Tuple
13
+
14
+ from compression.config import CompressionConfig
15
+
16
+
17
MEMORY_DIR = Path.home() / ".claude-memory"
DB_PATH = MEMORY_DIR / "memory.db"

# Importance score at or above which a memory is pinned to Tier 1 when
# config.preserve_high_importance is set.
_HIGH_IMPORTANCE_THRESHOLD = 8

# Memories accessed within this many days stay in Tier 1 when
# config.preserve_recently_accessed is set.
_RECENT_ACCESS_DAYS = 7


class TierClassifier:
    """Classify memories into compression tiers based on age and access patterns."""

    def __init__(self, db_path: Path = DB_PATH):
        self.db_path = db_path
        self.config = CompressionConfig()
        self._ensure_schema()

    def _ensure_schema(self) -> None:
        """Add tier/access-tracking columns and the archive table if missing."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            # SQLite has no ADD COLUMN IF NOT EXISTS; inspect the table first.
            cursor.execute("PRAGMA table_info(memories)")
            columns = [row[1] for row in cursor.fetchall()]

            if 'tier' not in columns:
                cursor.execute('ALTER TABLE memories ADD COLUMN tier INTEGER DEFAULT 1')
                cursor.execute('CREATE INDEX IF NOT EXISTS idx_tier ON memories(tier)')

            if 'last_accessed' not in columns:
                cursor.execute('ALTER TABLE memories ADD COLUMN last_accessed TIMESTAMP')

            if 'access_count' not in columns:
                cursor.execute('ALTER TABLE memories ADD COLUMN access_count INTEGER DEFAULT 0')

            # Full pre-compression content is preserved here so compression
            # can be reversed.
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS memory_archive (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    memory_id INTEGER UNIQUE NOT NULL,
                    full_content TEXT NOT NULL,
                    archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (memory_id) REFERENCES memories(id) ON DELETE CASCADE
                )
            ''')
            cursor.execute('CREATE INDEX IF NOT EXISTS idx_archive_memory ON memory_archive(memory_id)')

            conn.commit()
        finally:
            conn.close()

    def classify_memories(self) -> List[Tuple[int, int]]:
        """
        Classify all memories into tiers based on age and access.

        Rows whose created_at timestamp is missing or unparseable are skipped
        instead of aborting the entire run with an exception.

        Returns:
            List of (tier, memory_id) tuples for memories whose tier changed.
        """
        if not self.config.enabled:
            return []

        now = datetime.now()
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()

            cursor.execute('''
                SELECT id, created_at, last_accessed, access_count, importance, tier
                FROM memories
            ''')
            memories = cursor.fetchall()

            tier_updates = []

            for memory_id, created_at, last_accessed, access_count, importance, current_tier in memories:
                try:
                    created = datetime.fromisoformat(created_at)
                except (TypeError, ValueError):
                    # Missing/corrupt timestamp: leave this row untouched
                    # rather than failing the whole classification pass.
                    continue
                age_days = (now - created).days

                tier = self._pick_tier(now, age_days, last_accessed, importance)

                # Only write back rows whose tier actually changed.
                if tier != current_tier:
                    tier_updates.append((tier, memory_id))

            if tier_updates:
                cursor.executemany(
                    'UPDATE memories SET tier = ? WHERE id = ?',
                    tier_updates,
                )
                conn.commit()
        finally:
            conn.close()
        return tier_updates

    def _pick_tier(self, now: datetime, age_days: int, last_accessed, importance) -> int:
        """Decide the target tier for one memory row.

        Precedence: high importance pins to Tier 1, then recent access pins
        to Tier 1, then age-based classification.
        """
        if (self.config.preserve_high_importance
                and importance and importance >= _HIGH_IMPORTANCE_THRESHOLD):
            return 1

        if self.config.preserve_recently_accessed and last_accessed:
            try:
                last_access = datetime.fromisoformat(last_accessed)
            except (TypeError, ValueError):
                # Malformed access timestamp: ignore it and fall through
                # to the age-based rule instead of raising.
                last_access = None
            if last_access is not None and (now - last_access).days < _RECENT_ACCESS_DAYS:
                return 1

        return self._classify_by_age(age_days)

    def _classify_by_age(self, age_days: int) -> int:
        """Map a memory's age in days onto a tier (1=recent, 2=active, 3=archived)."""
        if age_days < self.config.tier2_threshold_days:
            return 1  # Recent
        elif age_days < self.config.tier3_threshold_days:
            return 2  # Active
        else:
            return 3  # Archived

    def get_tier_stats(self) -> Dict[str, int]:
        """Return the number of memories currently in each tier."""
        conn = sqlite3.connect(self.db_path)
        try:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT tier, COUNT(*) FROM memories GROUP BY tier
            ''')
            stats = dict(cursor.fetchall())
        finally:
            conn.close()

        return {
            'tier1': stats.get(1, 0),
            'tier2': stats.get(2, 0),
            'tier3': stats.get(3, 0)
        }
@@ -109,7 +109,7 @@ class DbConnectionManager:
109
109
  return cls._instances[key]
110
110
 
111
111
  @classmethod
112
- def reset_instance(cls, db_path: Optional[Path] = None):
112
+ def reset_instance(cls, db_path: Optional[Path] = None) -> None:
113
113
  """
114
114
  Remove and close a singleton instance. Used for testing and cleanup.
115
115
 
@@ -265,7 +265,7 @@ class DbConnectionManager:
265
265
 
266
266
  return conn
267
267
 
268
- def release_read_connection(self, conn: sqlite3.Connection):
268
+ def release_read_connection(self, conn: sqlite3.Connection) -> None:
269
269
  """
270
270
  Release a read connection back to the pool.
271
271
 
@@ -401,7 +401,7 @@ class DbConnectionManager:
401
401
  # Post-write hooks (Event Bus integration point)
402
402
  # =========================================================================
403
403
 
404
- def register_post_write_hook(self, hook: Callable[[], None]):
404
+ def register_post_write_hook(self, hook: Callable[[], None]) -> None:
405
405
  """
406
406
  Register a callback that fires after every successful write commit.
407
407
 
@@ -415,7 +415,7 @@ class DbConnectionManager:
415
415
  with self._post_write_hooks_lock:
416
416
  self._post_write_hooks.append(hook)
417
417
 
418
- def unregister_post_write_hook(self, hook: Callable[[], None]):
418
+ def unregister_post_write_hook(self, hook: Callable[[], None]) -> None:
419
419
  """
420
420
  Remove a previously registered post-write hook.
421
421
 
@@ -463,7 +463,7 @@ class DbConnectionManager:
463
463
  # Lifecycle management
464
464
  # =========================================================================
465
465
 
466
- def close(self):
466
+ def close(self) -> None:
467
467
  """
468
468
  Shut down the connection manager. Drains the write queue and closes
469
469
  all connections.
package/src/event_bus.py CHANGED
@@ -94,7 +94,7 @@ class EventBus:
94
94
  return cls._instances[key]
95
95
 
96
96
  @classmethod
97
- def reset_instance(cls, db_path: Optional[Path] = None):
97
+ def reset_instance(cls, db_path: Optional[Path] = None) -> None:
98
98
  """Remove and close a singleton instance. Used for testing."""
99
99
  with cls._instances_lock:
100
100
  if db_path is None:
@@ -171,25 +171,27 @@ class EventBus:
171
171
  # Fallback: direct connection
172
172
  import sqlite3
173
173
  conn = sqlite3.connect(str(self.db_path))
174
- cursor = conn.cursor()
175
- cursor.execute('''
176
- CREATE TABLE IF NOT EXISTS memory_events (
177
- id INTEGER PRIMARY KEY AUTOINCREMENT,
178
- event_type TEXT NOT NULL,
179
- memory_id INTEGER,
180
- source_agent TEXT DEFAULT 'user',
181
- source_protocol TEXT DEFAULT 'internal',
182
- payload TEXT,
183
- importance INTEGER DEFAULT 5,
184
- tier TEXT DEFAULT 'hot',
185
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
186
- )
187
- ''')
188
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_events_type ON memory_events(event_type)')
189
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_events_created ON memory_events(created_at)')
190
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_events_tier ON memory_events(tier)')
191
- conn.commit()
192
- conn.close()
174
+ try:
175
+ cursor = conn.cursor()
176
+ cursor.execute('''
177
+ CREATE TABLE IF NOT EXISTS memory_events (
178
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
179
+ event_type TEXT NOT NULL,
180
+ memory_id INTEGER,
181
+ source_agent TEXT DEFAULT 'user',
182
+ source_protocol TEXT DEFAULT 'internal',
183
+ payload TEXT,
184
+ importance INTEGER DEFAULT 5,
185
+ tier TEXT DEFAULT 'hot',
186
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
187
+ )
188
+ ''')
189
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_events_type ON memory_events(event_type)')
190
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_events_created ON memory_events(created_at)')
191
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_events_tier ON memory_events(tier)')
192
+ conn.commit()
193
+ finally:
194
+ conn.close()
193
195
 
194
196
  # =========================================================================
195
197
  # Event Emission
@@ -309,7 +311,7 @@ class EventBus:
309
311
  # Listener Management
310
312
  # =========================================================================
311
313
 
312
- def add_listener(self, callback: Callable[[dict], None]):
314
+ def add_listener(self, callback: Callable[[dict], None]) -> None:
313
315
  """
314
316
  Register a listener that receives every emitted event.
315
317
 
@@ -322,7 +324,7 @@ class EventBus:
322
324
  with self._listeners_lock:
323
325
  self._listeners.append(callback)
324
326
 
325
- def remove_listener(self, callback: Callable[[dict], None]):
327
+ def remove_listener(self, callback: Callable[[dict], None]) -> None:
326
328
  """Remove a previously registered listener."""
327
329
  with self._listeners_lock:
328
330
  try:
package/src/hnsw_index.py CHANGED
@@ -202,7 +202,7 @@ class HNSWIndex:
202
202
  except Exception as e:
203
203
  logger.error(f"Failed to save HNSW index: {e}")
204
204
 
205
- def build(self, vectors: np.ndarray, memory_ids: List[int]):
205
+ def build(self, vectors: np.ndarray, memory_ids: List[int]) -> None:
206
206
  """
207
207
  Build HNSW index from vectors.
208
208
 
@@ -276,7 +276,7 @@ class HNSWIndex:
276
276
  self.id_to_idx = {mem_id: idx for idx, mem_id in enumerate(memory_ids)}
277
277
  logger.info(f"Built fallback index with {len(vectors)} vectors (linear search)")
278
278
 
279
- def add(self, vector: np.ndarray, memory_id: int):
279
+ def add(self, vector: np.ndarray, memory_id: int) -> None:
280
280
  """
281
281
  Add single vector to index (incremental update).
282
282
 
@@ -416,7 +416,7 @@ class HNSWIndex:
416
416
  logger.warning("No search method available (HNSW and sklearn both unavailable)")
417
417
  return []
418
418
 
419
- def update(self, memory_id: int, vector: np.ndarray):
419
+ def update(self, memory_id: int, vector: np.ndarray) -> None:
420
420
  """
421
421
  Update vector for existing memory.
422
422
 
@@ -18,6 +18,7 @@ Dependencies (all optional):
18
18
 
19
19
  import logging
20
20
  from pathlib import Path
21
+ from typing import Any, Optional
21
22
 
22
23
  logger = logging.getLogger("superlocalmemory.learning")
23
24
 
@@ -85,7 +86,7 @@ _feedback_collector = None
85
86
  _engagement_tracker = None
86
87
 
87
88
 
88
- def get_learning_db():
89
+ def get_learning_db() -> Any:
89
90
  """Get or create the LearningDB singleton."""
90
91
  global _learning_db
91
92
  if _learning_db is None:
@@ -98,7 +99,7 @@ def get_learning_db():
98
99
  return _learning_db
99
100
 
100
101
 
101
- def get_adaptive_ranker():
102
+ def get_adaptive_ranker() -> Any:
102
103
  """Get or create the AdaptiveRanker singleton."""
103
104
  global _adaptive_ranker
104
105
  if _adaptive_ranker is None:
@@ -111,7 +112,7 @@ def get_adaptive_ranker():
111
112
  return _adaptive_ranker
112
113
 
113
114
 
114
- def get_feedback_collector():
115
+ def get_feedback_collector() -> Any:
115
116
  """Get or create the FeedbackCollector singleton."""
116
117
  global _feedback_collector
117
118
  if _feedback_collector is None:
@@ -124,7 +125,7 @@ def get_feedback_collector():
124
125
  return _feedback_collector
125
126
 
126
127
 
127
- def get_engagement_tracker():
128
+ def get_engagement_tracker() -> Any:
128
129
  """Get or create the EngagementTracker singleton."""
129
130
  global _engagement_tracker
130
131
  if _engagement_tracker is None: