superlocalmemory 2.8.2 → 2.8.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/ATTRIBUTION.md +1 -1
  2. package/CHANGELOG.md +17 -0
  3. package/README.md +7 -5
  4. package/api_server.py +5 -0
  5. package/bin/slm +35 -0
  6. package/bin/slm.bat +3 -3
  7. package/docs/SECURITY-QUICK-REFERENCE.md +214 -0
  8. package/install.ps1 +11 -11
  9. package/mcp_server.py +78 -10
  10. package/package.json +2 -2
  11. package/requirements-core.txt +16 -18
  12. package/requirements-learning.txt +8 -8
  13. package/requirements.txt +9 -7
  14. package/scripts/prepack.js +33 -0
  15. package/scripts/verify-v27.ps1 +301 -0
  16. package/src/agent_registry.py +32 -28
  17. package/src/auto_backup.py +12 -6
  18. package/src/cache_manager.py +2 -2
  19. package/src/compression/__init__.py +25 -0
  20. package/src/compression/cli.py +150 -0
  21. package/src/compression/cold_storage.py +217 -0
  22. package/src/compression/config.py +72 -0
  23. package/src/compression/orchestrator.py +133 -0
  24. package/src/compression/tier2_compressor.py +228 -0
  25. package/src/compression/tier3_compressor.py +153 -0
  26. package/src/compression/tier_classifier.py +148 -0
  27. package/src/db_connection_manager.py +5 -5
  28. package/src/event_bus.py +24 -22
  29. package/src/hnsw_index.py +3 -3
  30. package/src/learning/__init__.py +5 -4
  31. package/src/learning/adaptive_ranker.py +14 -265
  32. package/src/learning/bootstrap/__init__.py +69 -0
  33. package/src/learning/bootstrap/constants.py +93 -0
  34. package/src/learning/bootstrap/db_queries.py +316 -0
  35. package/src/learning/bootstrap/sampling.py +82 -0
  36. package/src/learning/bootstrap/text_utils.py +71 -0
  37. package/src/learning/cross_project_aggregator.py +58 -57
  38. package/src/learning/db/__init__.py +40 -0
  39. package/src/learning/db/constants.py +44 -0
  40. package/src/learning/db/schema.py +279 -0
  41. package/src/learning/learning_db.py +15 -234
  42. package/src/learning/ranking/__init__.py +33 -0
  43. package/src/learning/ranking/constants.py +84 -0
  44. package/src/learning/ranking/helpers.py +278 -0
  45. package/src/learning/source_quality_scorer.py +66 -65
  46. package/src/learning/synthetic_bootstrap.py +28 -310
  47. package/src/memory/__init__.py +36 -0
  48. package/src/memory/cli.py +205 -0
  49. package/src/memory/constants.py +39 -0
  50. package/src/memory/helpers.py +28 -0
  51. package/src/memory/schema.py +166 -0
  52. package/src/memory-profiles.py +94 -86
  53. package/src/memory-reset.py +187 -185
  54. package/src/memory_compression.py +2 -2
  55. package/src/memory_store_v2.py +44 -354
  56. package/src/migrate_v1_to_v2.py +11 -10
  57. package/src/patterns/analyzers.py +104 -100
  58. package/src/patterns/learner.py +17 -13
  59. package/src/patterns/scoring.py +25 -21
  60. package/src/patterns/store.py +40 -38
  61. package/src/patterns/terminology.py +53 -51
  62. package/src/provenance_tracker.py +2 -2
  63. package/src/qualixar_attribution.py +1 -1
  64. package/src/search/engine.py +16 -14
  65. package/src/search/index_loader.py +13 -11
  66. package/src/setup_validator.py +160 -158
  67. package/src/subscription_manager.py +20 -18
  68. package/src/tree/builder.py +66 -64
  69. package/src/tree/nodes.py +103 -97
  70. package/src/tree/queries.py +142 -137
  71. package/src/tree/schema.py +46 -42
  72. package/src/webhook_dispatcher.py +3 -3
  73. package/ui_server.py +7 -4
@@ -0,0 +1,279 @@
1
#!/usr/bin/env python3
# SPDX-License-Identifier: MIT
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
"""
Database schema definitions for learning.db.

This module contains all CREATE TABLE statements and schema migration logic
for the learning database. Extracted from learning_db.py to improve modularity.
"""

import logging
import sqlite3
from typing import List  # NOTE(review): List appears unused here — confirm before removing

logger = logging.getLogger("superlocalmemory.learning.db.schema")


# SQL table definitions (as constants for reuse and testing)

# Layer 1: cross-project transferable patterns.
# One row per (pattern_type, key) pair — enforced by the UNIQUE constraint —
# with confidence/evidence counters and a decay factor for aging patterns.
SCHEMA_TRANSFERABLE_PATTERNS = '''
    CREATE TABLE IF NOT EXISTS transferable_patterns (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        pattern_type TEXT NOT NULL,
        key TEXT NOT NULL,
        value TEXT NOT NULL,
        confidence REAL DEFAULT 0.0,
        evidence_count INTEGER DEFAULT 0,
        profiles_seen INTEGER DEFAULT 0,
        first_seen TIMESTAMP,
        last_seen TIMESTAMP,
        decay_factor REAL DEFAULT 1.0,
        contradictions TEXT DEFAULT '[]',
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        UNIQUE(pattern_type, key)
    )
'''

# Layer 3: workflow patterns (sequences + temporal + style).
# metadata is a JSON blob stored as TEXT, defaulting to an empty object.
SCHEMA_WORKFLOW_PATTERNS = '''
    CREATE TABLE IF NOT EXISTS workflow_patterns (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        pattern_type TEXT NOT NULL,
        pattern_key TEXT NOT NULL,
        pattern_value TEXT NOT NULL,
        confidence REAL DEFAULT 0.0,
        evidence_count INTEGER DEFAULT 0,
        last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        metadata TEXT DEFAULT '{}'
    )
'''

# Ranking feedback collected from all channels; each row is one signal
# (signal_type/signal_value) tied to a query hash and a memory id.
SCHEMA_RANKING_FEEDBACK = '''
    CREATE TABLE IF NOT EXISTS ranking_feedback (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        query_hash TEXT NOT NULL,
        query_keywords TEXT,
        memory_id INTEGER NOT NULL,
        rank_position INTEGER,
        signal_type TEXT NOT NULL,
        signal_value REAL DEFAULT 1.0,
        channel TEXT NOT NULL,
        source_tool TEXT,
        dwell_time REAL,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
'''

# Ranking model metadata: sample counts (synthetic vs real), the
# NDCG@10 evaluation score, and where the trained model file lives.
SCHEMA_RANKING_MODELS = '''
    CREATE TABLE IF NOT EXISTS ranking_models (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        model_version TEXT NOT NULL,
        training_samples INTEGER,
        synthetic_samples INTEGER DEFAULT 0,
        real_samples INTEGER DEFAULT 0,
        ndcg_at_10 REAL,
        model_path TEXT,
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
'''

# Per-source quality scores; one row per source_id (UNIQUE), with a
# neutral 0.5 starting quality_score.
SCHEMA_SOURCE_QUALITY = '''
    CREATE TABLE IF NOT EXISTS source_quality (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        source_id TEXT NOT NULL UNIQUE,
        positive_signals INTEGER DEFAULT 0,
        total_memories INTEGER DEFAULT 0,
        quality_score REAL DEFAULT 0.5,
        last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
'''

# Daily engagement counters (one row per metric_date, enforced UNIQUE);
# described elsewhere in this package as local only, never transmitted.
SCHEMA_ENGAGEMENT_METRICS = '''
    CREATE TABLE IF NOT EXISTS engagement_metrics (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        metric_date DATE NOT NULL UNIQUE,
        memories_created INTEGER DEFAULT 0,
        recalls_performed INTEGER DEFAULT 0,
        feedback_signals INTEGER DEFAULT 0,
        patterns_updated INTEGER DEFAULT 0,
        active_sources TEXT DEFAULT '[]',
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
'''

# v2.8.0 behavioral learning: recorded action outcomes.
# memory_ids and context are JSON stored as TEXT.
SCHEMA_ACTION_OUTCOMES = '''
    CREATE TABLE IF NOT EXISTS action_outcomes (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        memory_ids TEXT NOT NULL,
        outcome TEXT NOT NULL,
        action_type TEXT DEFAULT 'other',
        context TEXT DEFAULT '{}',
        confidence REAL DEFAULT 1.0,
        agent_id TEXT DEFAULT 'user',
        project TEXT,
        profile TEXT DEFAULT 'default',
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
'''

# v2.8.0 behavioral learning: aggregated behavioral patterns with
# success_rate/confidence statistics, scoped by project and profile.
SCHEMA_BEHAVIORAL_PATTERNS = '''
    CREATE TABLE IF NOT EXISTS behavioral_patterns (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        pattern_type TEXT NOT NULL,
        pattern_key TEXT NOT NULL,
        success_rate REAL DEFAULT 0.0,
        evidence_count INTEGER DEFAULT 0,
        confidence REAL DEFAULT 0.0,
        metadata TEXT DEFAULT '{}',
        project TEXT,
        profile TEXT DEFAULT 'default',
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
    )
'''

# v2.8.0 behavioral learning: cross-project pattern transfers.
# pattern_id is a FOREIGN KEY into behavioral_patterns, so that table
# must be created before this one.
SCHEMA_CROSS_PROJECT_BEHAVIORS = '''
    CREATE TABLE IF NOT EXISTS cross_project_behaviors (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        source_project TEXT NOT NULL,
        target_project TEXT NOT NULL,
        pattern_id INTEGER NOT NULL,
        transfer_type TEXT DEFAULT 'metadata',
        confidence REAL DEFAULT 0.0,
        profile TEXT DEFAULT 'default',
        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
        FOREIGN KEY (pattern_id) REFERENCES behavioral_patterns(id)
    )
'''
+
151
def create_all_tables(conn: sqlite3.Connection) -> None:
    """
    Create every learning-database table that does not yet exist.

    Args:
        conn: Open SQLite connection

    Raises:
        sqlite3.Error: If table creation fails
    """
    # DDL statements in creation order; cross_project_behaviors is last
    # because its FOREIGN KEY references behavioral_patterns.
    ddl_statements = (
        SCHEMA_TRANSFERABLE_PATTERNS,    # Layer 1: cross-project transferable patterns
        SCHEMA_WORKFLOW_PATTERNS,        # Layer 3: workflow patterns
        SCHEMA_RANKING_FEEDBACK,         # feedback from all channels
        SCHEMA_RANKING_MODELS,           # model metadata
        SCHEMA_SOURCE_QUALITY,           # source quality scores
        SCHEMA_ENGAGEMENT_METRICS,       # engagement metrics
        SCHEMA_ACTION_OUTCOMES,          # v2.8.0: behavioral learning
        SCHEMA_BEHAVIORAL_PATTERNS,      # v2.8.0: behavioral learning
        SCHEMA_CROSS_PROJECT_BEHAVIORS,  # v2.8.0: behavioral learning
    )

    cursor = conn.cursor()
    try:
        for ddl in ddl_statements:
            cursor.execute(ddl)
        conn.commit()
        logger.debug("All tables created successfully")
    except Exception as e:
        # Roll back the partial DDL batch, then let the caller see the error.
        logger.error("Failed to create tables: %s", e)
        conn.rollback()
        raise
194
+
195
+
196
def add_profile_columns(conn: sqlite3.Connection) -> None:
    """
    Add a ``profile`` column to per-profile tables (migration for v2.7.4+).

    Idempotent: a table that already has the column is skipped; any other
    ALTER TABLE failure (locked database, missing table, malformed schema)
    propagates instead of being silently swallowed.

    Args:
        conn: Open SQLite connection

    Raises:
        sqlite3.Error: For any failure other than the column already existing
    """
    cursor = conn.cursor()
    tables = ['ranking_feedback', 'transferable_patterns', 'workflow_patterns', 'source_quality']

    for table in tables:
        try:
            # Single-quote the default value: double quotes denote identifiers
            # in standard SQL and only work here via SQLite's legacy
            # double-quoted-string fallback.
            cursor.execute(f"ALTER TABLE {table} ADD COLUMN profile TEXT DEFAULT 'default'")
            logger.debug("Added profile column to %s", table)
        except sqlite3.OperationalError as exc:
            # Re-running the migration raises "duplicate column name" — that
            # is expected and ignored; anything else is a real error.
            if "duplicate column" not in str(exc).lower():
                raise

    conn.commit()
215
+
216
+
217
def create_indexes(conn: sqlite3.Connection) -> None:
    """
    Create all performance indexes (no-ops for indexes that already exist).

    Args:
        conn: Open SQLite connection
    """
    # (index name, table, indexed column) triples, grouped by purpose.
    index_specs = (
        # Profile indexes
        ('idx_feedback_profile', 'ranking_feedback', 'profile'),
        ('idx_patterns_profile', 'transferable_patterns', 'profile'),
        ('idx_workflow_profile', 'workflow_patterns', 'profile'),

        # Feedback indexes
        ('idx_feedback_query', 'ranking_feedback', 'query_hash'),
        ('idx_feedback_memory', 'ranking_feedback', 'memory_id'),
        ('idx_feedback_channel', 'ranking_feedback', 'channel'),
        ('idx_feedback_created', 'ranking_feedback', 'created_at'),

        # Pattern indexes
        ('idx_patterns_type', 'transferable_patterns', 'pattern_type'),
        ('idx_workflow_type', 'workflow_patterns', 'pattern_type'),

        # Engagement index
        ('idx_engagement_date', 'engagement_metrics', 'metric_date'),

        # v2.8.0 behavioral indexes
        ('idx_outcomes_memory', 'action_outcomes', 'memory_ids'),
        ('idx_outcomes_project', 'action_outcomes', 'project'),
        ('idx_outcomes_profile', 'action_outcomes', 'profile'),
        ('idx_bpatterns_type', 'behavioral_patterns', 'pattern_type'),
        ('idx_bpatterns_project', 'behavioral_patterns', 'project'),
        ('idx_xproject_source', 'cross_project_behaviors', 'source_project'),
        ('idx_xproject_target', 'cross_project_behaviors', 'target_project'),
    )

    cursor = conn.cursor()
    for name, table, column in index_specs:
        cursor.execute(f'CREATE INDEX IF NOT EXISTS {name} ON {table}({column})')

    conn.commit()
    logger.debug("All indexes created successfully")
260
+
261
+
262
def initialize_schema(conn: sqlite3.Connection) -> None:
    """
    Full schema initialization: tables + migrations + indexes.

    Args:
        conn: Open SQLite connection

    Raises:
        sqlite3.Error: If schema initialization fails
    """
    # Order matters: tables must exist before the profile-column migration,
    # and the profile columns must exist before the profile indexes.
    steps = (create_all_tables, add_profile_columns, create_indexes)
    try:
        for step in steps:
            step(conn)
        logger.info("Learning schema initialized successfully")
    except Exception as e:
        logger.error("Failed to initialize learning schema: %s", e)
        raise
@@ -28,10 +28,10 @@ from datetime import datetime, date
28
28
  from pathlib import Path
29
29
  from typing import Optional, Dict, List, Any
30
30
 
31
- logger = logging.getLogger("superlocalmemory.learning.db")
31
+ from .db.constants import MEMORY_DIR, LEARNING_DB_PATH, DEFAULT_PROFILE
32
+ from .db.schema import initialize_schema
32
33
 
33
- MEMORY_DIR = Path.home() / ".claude-memory"
34
- LEARNING_DB_PATH = MEMORY_DIR / "learning.db"
34
+ logger = logging.getLogger("superlocalmemory.learning.db")
35
35
 
36
36
 
37
37
  class LearningDB:
@@ -108,227 +108,8 @@ class LearningDB:
108
108
  def _init_schema(self):
109
109
  """Create all learning tables if they don't exist."""
110
110
  conn = self._get_connection()
111
- cursor = conn.cursor()
112
-
113
111
  try:
114
- # ------------------------------------------------------------------
115
- # Layer 1: Cross-project transferable patterns
116
- # ------------------------------------------------------------------
117
- cursor.execute('''
118
- CREATE TABLE IF NOT EXISTS transferable_patterns (
119
- id INTEGER PRIMARY KEY AUTOINCREMENT,
120
- pattern_type TEXT NOT NULL,
121
- key TEXT NOT NULL,
122
- value TEXT NOT NULL,
123
- confidence REAL DEFAULT 0.0,
124
- evidence_count INTEGER DEFAULT 0,
125
- profiles_seen INTEGER DEFAULT 0,
126
- first_seen TIMESTAMP,
127
- last_seen TIMESTAMP,
128
- decay_factor REAL DEFAULT 1.0,
129
- contradictions TEXT DEFAULT '[]',
130
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
131
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
132
- UNIQUE(pattern_type, key)
133
- )
134
- ''')
135
-
136
- # ------------------------------------------------------------------
137
- # Layer 3: Workflow patterns (sequences + temporal + style)
138
- # ------------------------------------------------------------------
139
- cursor.execute('''
140
- CREATE TABLE IF NOT EXISTS workflow_patterns (
141
- id INTEGER PRIMARY KEY AUTOINCREMENT,
142
- pattern_type TEXT NOT NULL,
143
- pattern_key TEXT NOT NULL,
144
- pattern_value TEXT NOT NULL,
145
- confidence REAL DEFAULT 0.0,
146
- evidence_count INTEGER DEFAULT 0,
147
- last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
148
- metadata TEXT DEFAULT '{}'
149
- )
150
- ''')
151
-
152
- # ------------------------------------------------------------------
153
- # Feedback from all channels
154
- # ------------------------------------------------------------------
155
- cursor.execute('''
156
- CREATE TABLE IF NOT EXISTS ranking_feedback (
157
- id INTEGER PRIMARY KEY AUTOINCREMENT,
158
- query_hash TEXT NOT NULL,
159
- query_keywords TEXT,
160
- memory_id INTEGER NOT NULL,
161
- rank_position INTEGER,
162
- signal_type TEXT NOT NULL,
163
- signal_value REAL DEFAULT 1.0,
164
- channel TEXT NOT NULL,
165
- source_tool TEXT,
166
- dwell_time REAL,
167
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
168
- )
169
- ''')
170
-
171
- # ------------------------------------------------------------------
172
- # Model metadata
173
- # ------------------------------------------------------------------
174
- cursor.execute('''
175
- CREATE TABLE IF NOT EXISTS ranking_models (
176
- id INTEGER PRIMARY KEY AUTOINCREMENT,
177
- model_version TEXT NOT NULL,
178
- training_samples INTEGER,
179
- synthetic_samples INTEGER DEFAULT 0,
180
- real_samples INTEGER DEFAULT 0,
181
- ndcg_at_10 REAL,
182
- model_path TEXT,
183
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
184
- )
185
- ''')
186
-
187
- # ------------------------------------------------------------------
188
- # Source quality scores (per-source learning)
189
- # ------------------------------------------------------------------
190
- cursor.execute('''
191
- CREATE TABLE IF NOT EXISTS source_quality (
192
- id INTEGER PRIMARY KEY AUTOINCREMENT,
193
- source_id TEXT NOT NULL UNIQUE,
194
- positive_signals INTEGER DEFAULT 0,
195
- total_memories INTEGER DEFAULT 0,
196
- quality_score REAL DEFAULT 0.5,
197
- last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP
198
- )
199
- ''')
200
-
201
- # ------------------------------------------------------------------
202
- # Engagement metrics (local only, never transmitted)
203
- # ------------------------------------------------------------------
204
- cursor.execute('''
205
- CREATE TABLE IF NOT EXISTS engagement_metrics (
206
- id INTEGER PRIMARY KEY AUTOINCREMENT,
207
- metric_date DATE NOT NULL UNIQUE,
208
- memories_created INTEGER DEFAULT 0,
209
- recalls_performed INTEGER DEFAULT 0,
210
- feedback_signals INTEGER DEFAULT 0,
211
- patterns_updated INTEGER DEFAULT 0,
212
- active_sources TEXT DEFAULT '[]',
213
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
214
- )
215
- ''')
216
-
217
- # ------------------------------------------------------------------
218
- # Indexes for performance
219
- # ------------------------------------------------------------------
220
- # v2.7.4: Add profile columns for per-profile learning
221
- for table in ['ranking_feedback', 'transferable_patterns', 'workflow_patterns', 'source_quality']:
222
- try:
223
- cursor.execute(f'ALTER TABLE {table} ADD COLUMN profile TEXT DEFAULT "default"')
224
- except Exception:
225
- pass # Column already exists
226
-
227
- cursor.execute(
228
- 'CREATE INDEX IF NOT EXISTS idx_feedback_profile '
229
- 'ON ranking_feedback(profile)'
230
- )
231
- cursor.execute(
232
- 'CREATE INDEX IF NOT EXISTS idx_patterns_profile '
233
- 'ON transferable_patterns(profile)'
234
- )
235
- cursor.execute(
236
- 'CREATE INDEX IF NOT EXISTS idx_workflow_profile '
237
- 'ON workflow_patterns(profile)'
238
- )
239
- cursor.execute(
240
- 'CREATE INDEX IF NOT EXISTS idx_feedback_query '
241
- 'ON ranking_feedback(query_hash)'
242
- )
243
- cursor.execute(
244
- 'CREATE INDEX IF NOT EXISTS idx_feedback_memory '
245
- 'ON ranking_feedback(memory_id)'
246
- )
247
- cursor.execute(
248
- 'CREATE INDEX IF NOT EXISTS idx_feedback_channel '
249
- 'ON ranking_feedback(channel)'
250
- )
251
- cursor.execute(
252
- 'CREATE INDEX IF NOT EXISTS idx_feedback_created '
253
- 'ON ranking_feedback(created_at)'
254
- )
255
- cursor.execute(
256
- 'CREATE INDEX IF NOT EXISTS idx_patterns_type '
257
- 'ON transferable_patterns(pattern_type)'
258
- )
259
- cursor.execute(
260
- 'CREATE INDEX IF NOT EXISTS idx_workflow_type '
261
- 'ON workflow_patterns(pattern_type)'
262
- )
263
- cursor.execute(
264
- 'CREATE INDEX IF NOT EXISTS idx_engagement_date '
265
- 'ON engagement_metrics(metric_date)'
266
- )
267
-
268
- # ------------------------------------------------------------------
269
- # v2.8.0: Behavioral learning tables
270
- # ------------------------------------------------------------------
271
- cursor.execute('''
272
- CREATE TABLE IF NOT EXISTS action_outcomes (
273
- id INTEGER PRIMARY KEY AUTOINCREMENT,
274
- memory_ids TEXT NOT NULL,
275
- outcome TEXT NOT NULL,
276
- action_type TEXT DEFAULT 'other',
277
- context TEXT DEFAULT '{}',
278
- confidence REAL DEFAULT 1.0,
279
- agent_id TEXT DEFAULT 'user',
280
- project TEXT,
281
- profile TEXT DEFAULT 'default',
282
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
283
- )
284
- ''')
285
-
286
- cursor.execute('''
287
- CREATE TABLE IF NOT EXISTS behavioral_patterns (
288
- id INTEGER PRIMARY KEY AUTOINCREMENT,
289
- pattern_type TEXT NOT NULL,
290
- pattern_key TEXT NOT NULL,
291
- success_rate REAL DEFAULT 0.0,
292
- evidence_count INTEGER DEFAULT 0,
293
- confidence REAL DEFAULT 0.0,
294
- metadata TEXT DEFAULT '{}',
295
- project TEXT,
296
- profile TEXT DEFAULT 'default',
297
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
298
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
299
- )
300
- ''')
301
-
302
- cursor.execute('''
303
- CREATE TABLE IF NOT EXISTS cross_project_behaviors (
304
- id INTEGER PRIMARY KEY AUTOINCREMENT,
305
- source_project TEXT NOT NULL,
306
- target_project TEXT NOT NULL,
307
- pattern_id INTEGER NOT NULL,
308
- transfer_type TEXT DEFAULT 'metadata',
309
- confidence REAL DEFAULT 0.0,
310
- profile TEXT DEFAULT 'default',
311
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
312
- FOREIGN KEY (pattern_id) REFERENCES behavioral_patterns(id)
313
- )
314
- ''')
315
-
316
- # v2.8.0 indexes
317
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_outcomes_memory ON action_outcomes(memory_ids)')
318
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_outcomes_project ON action_outcomes(project)')
319
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_outcomes_profile ON action_outcomes(profile)')
320
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_bpatterns_type ON behavioral_patterns(pattern_type)')
321
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_bpatterns_project ON behavioral_patterns(project)')
322
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_xproject_source ON cross_project_behaviors(source_project)')
323
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_xproject_target ON cross_project_behaviors(target_project)')
324
-
325
- conn.commit()
326
- logger.info("Learning schema initialized successfully")
327
-
328
- except Exception as e:
329
- logger.error("Failed to initialize learning schema: %s", e)
330
- conn.rollback()
331
- raise
112
+ initialize_schema(conn)
332
113
  finally:
333
114
  conn.close()
334
115
 
@@ -682,7 +463,7 @@ class LearningDB:
682
463
  finally:
683
464
  conn.close()
684
465
 
685
- def clear_workflow_patterns(self, pattern_type: Optional[str] = None):
466
+ def clear_workflow_patterns(self, pattern_type: Optional[str] = None) -> None:
686
467
  """Clear workflow patterns (used before re-mining)."""
687
468
  with self._write_lock:
688
469
  conn = self._get_connection()
@@ -712,7 +493,7 @@ class LearningDB:
712
493
  source_id: str,
713
494
  positive_signals: int,
714
495
  total_memories: int,
715
- ):
496
+ ) -> None:
716
497
  """Update quality score for a memory source."""
717
498
  # Beta-Binomial smoothing: (alpha + pos) / (alpha + beta + total)
718
499
  quality_score = (1.0 + positive_signals) / (2.0 + total_memories)
@@ -820,7 +601,7 @@ class LearningDB:
820
601
  metric_type: str,
821
602
  count: int = 1,
822
603
  source: Optional[str] = None,
823
- ):
604
+ ) -> None:
824
605
  """
825
606
  Increment a daily engagement metric.
826
607
 
@@ -966,7 +747,7 @@ class LearningDB:
966
747
  # v2.8.0: Action Outcomes CRUD
967
748
  # ======================================================================
968
749
 
969
- def store_outcome(self, memory_ids, outcome, action_type="other", context=None, confidence=1.0, agent_id="user", project=None, profile="default"):
750
+ def store_outcome(self, memory_ids: Any, outcome: str, action_type: str = "other", context: Optional[Dict] = None, confidence: float = 1.0, agent_id: str = "user", project: Optional[str] = None, profile: str = "default") -> int:
970
751
  """Store an action outcome for behavioral learning."""
971
752
  memory_ids_str = json.dumps(memory_ids if isinstance(memory_ids, list) else [memory_ids])
972
753
  context_str = json.dumps(context or {})
@@ -982,7 +763,7 @@ class LearningDB:
982
763
  finally:
983
764
  conn.close()
984
765
 
985
- def get_outcomes(self, memory_id=None, project=None, profile="default", limit=100):
766
+ def get_outcomes(self, memory_id: Optional[int] = None, project: Optional[str] = None, profile: str = "default", limit: int = 100) -> List[Dict[str, Any]]:
986
767
  """Get action outcomes, optionally filtered."""
987
768
  conn = self._get_connection()
988
769
  try:
@@ -1010,7 +791,7 @@ class LearningDB:
1010
791
  # v2.8.0: Behavioral Patterns CRUD
1011
792
  # ======================================================================
1012
793
 
1013
- def store_behavioral_pattern(self, pattern_type, pattern_key, success_rate=0.0, evidence_count=0, confidence=0.0, metadata=None, project=None, profile="default"):
794
+ def store_behavioral_pattern(self, pattern_type: str, pattern_key: str, success_rate: float = 0.0, evidence_count: int = 0, confidence: float = 0.0, metadata: Optional[Dict] = None, project: Optional[str] = None, profile: str = "default") -> int:
1014
795
  """Store or update a behavioral pattern."""
1015
796
  metadata_str = json.dumps(metadata or {})
1016
797
  conn = self._get_connection()
@@ -1025,7 +806,7 @@ class LearningDB:
1025
806
  finally:
1026
807
  conn.close()
1027
808
 
1028
- def get_behavioral_patterns(self, pattern_type=None, project=None, min_confidence=0.0, profile="default"):
809
+ def get_behavioral_patterns(self, pattern_type: Optional[str] = None, project: Optional[str] = None, min_confidence: float = 0.0, profile: str = "default") -> List[Dict[str, Any]]:
1029
810
  """Get behavioral patterns, optionally filtered."""
1030
811
  conn = self._get_connection()
1031
812
  try:
@@ -1052,7 +833,7 @@ class LearningDB:
1052
833
  # v2.8.0: Cross-Project CRUD
1053
834
  # ======================================================================
1054
835
 
1055
- def store_cross_project(self, source_project, target_project, pattern_id, transfer_type="metadata", confidence=0.0, profile="default"):
836
+ def store_cross_project(self, source_project: str, target_project: str, pattern_id: int, transfer_type: str = "metadata", confidence: float = 0.0, profile: str = "default") -> int:
1056
837
  """Record a cross-project behavioral transfer."""
1057
838
  conn = self._get_connection()
1058
839
  try:
@@ -1066,7 +847,7 @@ class LearningDB:
1066
847
  finally:
1067
848
  conn.close()
1068
849
 
1069
- def get_cross_project_transfers(self, source_project=None, target_project=None, profile="default"):
850
+ def get_cross_project_transfers(self, source_project: Optional[str] = None, target_project: Optional[str] = None, profile: str = "default") -> List[Dict[str, Any]]:
1070
851
  """Get cross-project transfer records."""
1071
852
  conn = self._get_connection()
1072
853
  try:
@@ -1087,7 +868,7 @@ class LearningDB:
1087
868
  # Reset / Cleanup
1088
869
  # ======================================================================
1089
870
 
1090
- def reset(self):
871
+ def reset(self) -> None:
1091
872
  """
1092
873
  Delete all learning data. Memories in memory.db are preserved.
1093
874
 
@@ -1115,7 +896,7 @@ class LearningDB:
1115
896
  finally:
1116
897
  conn.close()
1117
898
 
1118
- def delete_database(self):
899
+ def delete_database(self) -> None:
1119
900
  """
1120
901
  Completely delete learning.db file.
1121
902
  More aggressive than reset() — removes the file entirely.
@@ -0,0 +1,33 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ """
5
+ Ranking utilities package for AdaptiveRanker.
6
+
7
+ Provides constants, helpers, and feature utilities extracted from
8
+ the main adaptive_ranker.py module for better maintainability.
9
+ """
10
+
11
+ from .constants import (
12
+ MODELS_DIR,
13
+ MODEL_PATH,
14
+ PHASE_THRESHOLDS,
15
+ MIN_UNIQUE_QUERIES_FOR_ML,
16
+ RULE_BOOST,
17
+ TRAINING_PARAMS,
18
+ )
19
+ from .helpers import (
20
+ calculate_rule_boost,
21
+ prepare_training_data_internal,
22
+ )
23
+
24
+ __all__ = [
25
+ 'MODELS_DIR',
26
+ 'MODEL_PATH',
27
+ 'PHASE_THRESHOLDS',
28
+ 'MIN_UNIQUE_QUERIES_FOR_ML',
29
+ 'RULE_BOOST',
30
+ 'TRAINING_PARAMS',
31
+ 'calculate_rule_boost',
32
+ 'prepare_training_data_internal',
33
+ ]