superlocalmemory 2.8.1 → 2.8.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90)
  1. package/ATTRIBUTION.md +50 -0
  2. package/CHANGELOG.md +8 -0
  3. package/README.md +31 -20
  4. package/api_server.py +5 -0
  5. package/bin/aider-smart +2 -2
  6. package/bin/slm +18 -18
  7. package/bin/slm.bat +3 -3
  8. package/configs/continue-skills.yaml +4 -4
  9. package/docs/ARCHITECTURE.md +3 -3
  10. package/docs/CLI-COMMANDS-REFERENCE.md +18 -18
  11. package/docs/FRAMEWORK-INTEGRATIONS.md +4 -4
  12. package/docs/SECURITY-QUICK-REFERENCE.md +214 -0
  13. package/docs/UNIVERSAL-INTEGRATION.md +15 -15
  14. package/install.ps1 +11 -11
  15. package/install.sh +4 -4
  16. package/mcp_server.py +4 -4
  17. package/package.json +5 -3
  18. package/requirements-core.txt +16 -18
  19. package/requirements-learning.txt +8 -8
  20. package/requirements.txt +9 -7
  21. package/scripts/prepack.js +33 -0
  22. package/scripts/verify-v27.ps1 +301 -0
  23. package/src/agent_registry.py +32 -28
  24. package/src/auto_backup.py +12 -6
  25. package/src/cache_manager.py +2 -2
  26. package/src/compression/__init__.py +25 -0
  27. package/src/compression/cli.py +150 -0
  28. package/src/compression/cold_storage.py +217 -0
  29. package/src/compression/config.py +72 -0
  30. package/src/compression/orchestrator.py +133 -0
  31. package/src/compression/tier2_compressor.py +228 -0
  32. package/src/compression/tier3_compressor.py +153 -0
  33. package/src/compression/tier_classifier.py +148 -0
  34. package/src/db_connection_manager.py +5 -5
  35. package/src/event_bus.py +24 -22
  36. package/src/graph/graph_core.py +3 -3
  37. package/src/hnsw_index.py +3 -3
  38. package/src/learning/__init__.py +5 -4
  39. package/src/learning/adaptive_ranker.py +14 -265
  40. package/src/learning/bootstrap/__init__.py +69 -0
  41. package/src/learning/bootstrap/constants.py +93 -0
  42. package/src/learning/bootstrap/db_queries.py +316 -0
  43. package/src/learning/bootstrap/sampling.py +82 -0
  44. package/src/learning/bootstrap/text_utils.py +71 -0
  45. package/src/learning/cross_project_aggregator.py +58 -57
  46. package/src/learning/db/__init__.py +40 -0
  47. package/src/learning/db/constants.py +44 -0
  48. package/src/learning/db/schema.py +279 -0
  49. package/src/learning/learning_db.py +15 -234
  50. package/src/learning/ranking/__init__.py +33 -0
  51. package/src/learning/ranking/constants.py +84 -0
  52. package/src/learning/ranking/helpers.py +278 -0
  53. package/src/learning/source_quality_scorer.py +66 -65
  54. package/src/learning/synthetic_bootstrap.py +28 -310
  55. package/src/memory/__init__.py +36 -0
  56. package/src/memory/cli.py +205 -0
  57. package/src/memory/constants.py +39 -0
  58. package/src/memory/helpers.py +28 -0
  59. package/src/memory/schema.py +166 -0
  60. package/src/memory-profiles.py +94 -86
  61. package/src/memory-reset.py +187 -185
  62. package/src/memory_compression.py +2 -2
  63. package/src/memory_store_v2.py +40 -355
  64. package/src/migrate_v1_to_v2.py +11 -10
  65. package/src/patterns/analyzers.py +104 -100
  66. package/src/patterns/learner.py +17 -13
  67. package/src/patterns/scoring.py +25 -21
  68. package/src/patterns/store.py +40 -38
  69. package/src/patterns/terminology.py +53 -51
  70. package/src/provenance_tracker.py +2 -2
  71. package/src/qualixar_attribution.py +139 -0
  72. package/src/qualixar_watermark.py +78 -0
  73. package/src/search/engine.py +16 -14
  74. package/src/search/index_loader.py +13 -11
  75. package/src/setup_validator.py +162 -160
  76. package/src/subscription_manager.py +20 -18
  77. package/src/tree/builder.py +66 -64
  78. package/src/tree/nodes.py +103 -97
  79. package/src/tree/queries.py +142 -137
  80. package/src/tree/schema.py +46 -42
  81. package/src/webhook_dispatcher.py +3 -3
  82. package/ui_server.py +7 -4
  83. /package/bin/{superlocalmemoryv2:learning → superlocalmemoryv2-learning} +0 -0
  84. /package/bin/{superlocalmemoryv2:list → superlocalmemoryv2-list} +0 -0
  85. /package/bin/{superlocalmemoryv2:patterns → superlocalmemoryv2-patterns} +0 -0
  86. /package/bin/{superlocalmemoryv2:profile → superlocalmemoryv2-profile} +0 -0
  87. /package/bin/{superlocalmemoryv2:recall → superlocalmemoryv2-recall} +0 -0
  88. /package/bin/{superlocalmemoryv2:remember → superlocalmemoryv2-remember} +0 -0
  89. /package/bin/{superlocalmemoryv2:reset → superlocalmemoryv2-reset} +0 -0
  90. /package/bin/{superlocalmemoryv2:status → superlocalmemoryv2-status} +0 -0
@@ -0,0 +1,139 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ # Part of Qualixar — Advancing Agent Development Through Research
5
+ """
6
+ Qualixar Attribution — Cryptographic output signing for provenance tracking.
7
+
8
+ Signs tool outputs with SHA-256 content hashes and verifiable provenance
9
+ metadata. Part of the 3-layer Qualixar attribution system:
10
+ Layer 1: Visible attribution (ATTRIBUTION.md, get_attribution())
11
+ Layer 2: Cryptographic signing (this module)
12
+ Layer 3: Steganographic watermarking (qualixar_watermark.py)
13
+ """
14
+
15
+ import hashlib
16
+ import json
17
+ import time
18
+ from typing import Any, Dict
19
+
20
+
21
# Registry of all Qualixar research initiative tools
TOOL_REGISTRY: Dict[str, str] = {
    "agentassert": "AgentAssert — Behavioral Contracts",
    "agentassay": "AgentAssay — Stochastic Testing",
    "skillfortify": "SkillFortify — Security Validation",
    "superlocalmemory": "SuperLocalMemory — Agent Memory",
    "telephonebench": "TelephoneBench — Communication Benchmarks",
    "vibecheck": "VibeCheck — Code Reliability",
    "agentreplay": "AgentReplay — Time-Travel Debugging",
    "agentchaos": "AgentChaos — Chaos Engineering",
    "agentmigrate": "AgentMigrate — Migration Engineering",
    "agentpact": "AgentPact — Composition Testing",
}


class QualixarSigner:
    """Signs tool outputs with cryptographic provenance metadata.

    Adds a ``_qualixar`` block to output dictionaries containing a SHA-256
    content hash, timestamp, tool information, and a verifiable signature.

    Args:
        tool_name: Identifier for the tool (e.g. ``"superlocalmemory"``).
        version: Semantic version string of the tool.

    Example::

        signer = QualixarSigner("superlocalmemory", "2.8.3")
        signed = signer.sign({"memories": [...]})
        assert QualixarSigner.verify(signed) is True
    """

    def __init__(self, tool_name: str, version: str) -> None:
        self.tool_name = tool_name
        self.version = version
        # Unknown tools fall back to their raw name as the description.
        self.tool_desc = TOOL_REGISTRY.get(tool_name, tool_name)

    def sign(self, output_data: Any) -> Dict[str, Any]:
        """Add cryptographic provenance to any output.

        Args:
            output_data: The data to sign. If a dict, provenance is added
                in-place. Otherwise wrapped in ``{"data": ...}``.

        Returns:
            Dictionary with the original data plus a ``_qualixar`` provenance
            block containing content_hash, timestamp, and signature.
        """
        timestamp = time.time()
        # Canonicalize to the exact shape verify() will re-hash BEFORE
        # hashing. BUG FIX: non-dict data used to be hashed raw but
        # returned wrapped as {"data": ...}, so verify() re-computed the
        # hash over the wrapped form and always failed for non-dict input.
        if isinstance(output_data, dict):
            content: Dict[str, Any] = output_data
        else:
            content = {"data": output_data}
        # default=str stringifies non-JSON-serializable values so hashing
        # never raises; sort_keys makes the serialization canonical.
        canonical = json.dumps(content, sort_keys=True, default=str)
        content_hash = hashlib.sha256(canonical.encode()).hexdigest()

        provenance = {
            "_qualixar": {
                "tool": self.tool_name,
                "tool_description": self.tool_desc,
                "version": self.version,
                "platform": "Qualixar",
                "timestamp": timestamp,
                "content_hash": content_hash,
                "license": "MIT",
                "attribution": (
                    f"Generated by {self.tool_desc} v{self.version}"
                ),
                "signature": self._compute_signature(
                    content_hash, timestamp
                ),
            }
        }

        if isinstance(output_data, dict):
            # Mutate in place so callers holding the same dict see the block.
            output_data.update(provenance)
            return output_data
        return {**content, **provenance}

    def _compute_signature(
        self, content_hash: str, timestamp: float
    ) -> str:
        """Compute HMAC-style signature over content hash and timestamp.

        In production, this should use Ed25519 with a private key.
        The current implementation uses SHA-256 over a canonical string
        as a lightweight integrity check (there is no secret key, so this
        is tamper-evidence, not authentication).

        Args:
            content_hash: SHA-256 hex digest of the canonical content.
            timestamp: Unix timestamp of signing.

        Returns:
            Hex-encoded SHA-256 signature string.
        """
        sig_input = (
            f"{self.tool_name}:{self.version}:"
            f"{content_hash}:{timestamp}"
        )
        return hashlib.sha256(sig_input.encode()).hexdigest()

    @staticmethod
    def verify(output_data: Dict[str, Any]) -> bool:
        """Verify an output's provenance hash and signature.

        Re-computes the content hash from the non-provenance fields,
        compares it against the hash stored in the ``_qualixar`` block,
        then re-derives the signature from the provenance fields and
        checks it too. BUG FIX: previously only the content hash was
        checked despite the docstring promising signature verification.

        Args:
            output_data: A dictionary previously signed with :meth:`sign`.

        Returns:
            ``True`` if both the content hash and the signature match,
            ``False`` otherwise.
        """
        prov = output_data.get("_qualixar", {})
        if not prov:
            return False
        content = {
            k: v for k, v in output_data.items() if k != "_qualixar"
        }
        canonical = json.dumps(content, sort_keys=True, default=str)
        if hashlib.sha256(canonical.encode()).hexdigest() != prov.get(
            "content_hash"
        ):
            return False
        # Re-derive the signature exactly as _compute_signature does,
        # using only fields carried inside the provenance block.
        sig_input = (
            f"{prov.get('tool')}:{prov.get('version')}:"
            f"{prov.get('content_hash')}:{prov.get('timestamp')}"
        )
        expected_sig = hashlib.sha256(sig_input.encode()).hexdigest()
        return prov.get("signature") == expected_sig
@@ -0,0 +1,78 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ # Part of Qualixar — Advancing Agent Development Through Research
5
+ """
6
+ Qualixar Watermark — Steganographic attribution for text outputs.
7
+
8
+ Embeds invisible zero-width Unicode characters in text to encode a tool
9
+ identifier. The watermark is invisible to human readers but can be
10
+ extracted programmatically to verify provenance.
11
+
12
+ Part of the 3-layer Qualixar attribution system:
13
+ Layer 1: Visible attribution (ATTRIBUTION.md, get_attribution())
14
+ Layer 2: Cryptographic signing (qualixar_attribution.py)
15
+ Layer 3: Steganographic watermarking (this module)
16
+
17
+ No external dependencies required.
18
+ """
19
+
20
# Zero-width characters for binary encoding
ZW_SPACE = '\u200b'   # Zero-width space  = bit 0
ZW_JOINER = '\u200d'  # Zero-width joiner = bit 1
ZW_SEP = '\ufeff'     # Byte order mark   = separator


def encode_watermark(text: str, tool_id: str) -> str:
    """Embed an invisible watermark in text output.

    Converts ``tool_id`` to binary and encodes each bit as a zero-width
    Unicode character. The watermark is inserted after the first paragraph
    break (``\\n\\n``) so it remains invisible to human readers.

    Args:
        text: The text to watermark.
        tool_id: Short identifier to embed (e.g. ``"slm"``).

    Returns:
        The original text with the invisible watermark inserted.
    """
    # 8 bits per character, big-endian; assumes tool_id is ASCII/Latin-1
    # range so each code point fits in one byte.
    binary = ''.join(format(ord(c), '08b') for c in tool_id)
    watermark = ZW_SEP
    for bit in binary:
        watermark += ZW_SPACE if bit == '0' else ZW_JOINER
    watermark += ZW_SEP

    # Insert after first paragraph break (invisible to users)
    if '\n\n' in text:
        idx = text.index('\n\n') + 2
        return text[:idx] + watermark + text[idx:]
    return text + watermark


def decode_watermark(text: str) -> str:
    """Extract a hidden watermark from text.

    Locates the zero-width separator characters and decodes the binary
    payload between them back into the original tool identifier string.

    Args:
        text: Text that may contain a watermark.

    Returns:
        The decoded tool identifier, or an empty string if no watermark
        is found or the span between separators is not a valid payload.
    """
    start = text.find(ZW_SEP)
    if start == -1:
        return ""
    end = text.find(ZW_SEP, start + 1)
    if end == -1:
        return ""
    encoded = text[start + 1:end]
    # BUG FIX: a genuine payload consists only of the two zero-width bit
    # characters. Previously any other character was silently treated as
    # bit '1', so ordinary text between two BOMs decoded to garbage
    # instead of reporting "no watermark".
    if any(c not in (ZW_SPACE, ZW_JOINER) for c in encoded):
        return ""
    binary = ''.join(
        '0' if c == ZW_SPACE else '1'
        for c in encoded
    )
    chars = [binary[i:i + 8] for i in range(0, len(binary), 8)]
    # Drop any trailing partial byte rather than decoding it.
    return ''.join(chr(int(b, 2)) for b in chars if len(b) == 8)
@@ -172,20 +172,22 @@ class HybridSearchEngine(IndexLoaderMixin, SearchMethodsMixin, FusionMixin):
172
172
  id_to_score = {mem_id: score for mem_id, score in raw_results}
173
173
 
174
174
  conn = sqlite3.connect(self.db_path)
175
- cursor = conn.cursor()
176
-
177
- # Fetch memories
178
- placeholders = ','.join(['?'] * len(memory_ids))
179
- cursor.execute(f'''
180
- SELECT id, content, summary, project_path, project_name, tags,
181
- category, parent_id, tree_path, depth, memory_type,
182
- importance, created_at, cluster_id, last_accessed, access_count
183
- FROM memories
184
- WHERE id IN ({placeholders})
185
- ''', memory_ids)
186
-
187
- rows = cursor.fetchall()
188
- conn.close()
175
+ try:
176
+ cursor = conn.cursor()
177
+
178
+ # Fetch memories
179
+ placeholders = ','.join(['?'] * len(memory_ids))
180
+ cursor.execute(f'''
181
+ SELECT id, content, summary, project_path, project_name, tags,
182
+ category, parent_id, tree_path, depth, memory_type,
183
+ importance, created_at, cluster_id, last_accessed, access_count
184
+ FROM memories
185
+ WHERE id IN ({placeholders})
186
+ ''', memory_ids)
187
+
188
+ rows = cursor.fetchall()
189
+ finally:
190
+ conn.close()
189
191
 
190
192
  # Build result dictionaries
191
193
  results = []
@@ -31,17 +31,19 @@ class IndexLoaderMixin:
31
31
  Load documents from database and build search indexes.
32
32
  """
33
33
  conn = sqlite3.connect(self.db_path)
34
- cursor = conn.cursor()
35
-
36
- # Fetch all memories
37
- cursor.execute('''
38
- SELECT id, content, summary, tags
39
- FROM memories
40
- ORDER BY id
41
- ''')
42
-
43
- rows = cursor.fetchall()
44
- conn.close()
34
+ try:
35
+ cursor = conn.cursor()
36
+
37
+ # Fetch all memories
38
+ cursor.execute('''
39
+ SELECT id, content, summary, tags
40
+ FROM memories
41
+ ORDER BY id
42
+ ''')
43
+
44
+ rows = cursor.fetchall()
45
+ finally:
46
+ conn.close()
45
47
 
46
48
  if not rows:
47
49
  return
@@ -144,7 +144,7 @@ def check_database() -> Tuple[bool, str, List[str]]:
144
144
  try:
145
145
  cursor.execute("SELECT COUNT(*) FROM memories")
146
146
  memory_count = cursor.fetchone()[0]
147
- except:
147
+ except Exception:
148
148
  memory_count = 0
149
149
 
150
150
  conn.close()
@@ -192,163 +192,165 @@ def initialize_database() -> Tuple[bool, str]:
192
192
  MEMORY_DIR.mkdir(parents=True, exist_ok=True)
193
193
 
194
194
  conn = sqlite3.connect(DB_PATH)
195
- cursor = conn.cursor()
196
-
197
- # Create memories table (core)
198
- cursor.execute('''
199
- CREATE TABLE IF NOT EXISTS memories (
200
- id INTEGER PRIMARY KEY AUTOINCREMENT,
201
- content TEXT NOT NULL,
202
- summary TEXT,
203
- project_path TEXT,
204
- project_name TEXT,
205
- tags TEXT DEFAULT '[]',
206
- category TEXT,
207
- parent_id INTEGER,
208
- tree_path TEXT DEFAULT '/',
209
- depth INTEGER DEFAULT 0,
210
- memory_type TEXT DEFAULT 'session',
211
- importance INTEGER DEFAULT 5,
212
- content_hash TEXT,
213
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
214
- last_accessed TIMESTAMP,
215
- access_count INTEGER DEFAULT 0,
216
- compressed_at TIMESTAMP,
217
- tier INTEGER DEFAULT 1,
218
- cluster_id INTEGER,
219
- FOREIGN KEY (parent_id) REFERENCES memories(id)
220
- )
221
- ''')
222
-
223
- # Create graph tables
224
- cursor.execute('''
225
- CREATE TABLE IF NOT EXISTS graph_nodes (
226
- id INTEGER PRIMARY KEY AUTOINCREMENT,
227
- memory_id INTEGER UNIQUE NOT NULL,
228
- entities TEXT DEFAULT '[]',
229
- embedding_vector BLOB,
230
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
231
- FOREIGN KEY (memory_id) REFERENCES memories(id)
232
- )
233
- ''')
234
-
235
- cursor.execute('''
236
- CREATE TABLE IF NOT EXISTS graph_edges (
237
- id INTEGER PRIMARY KEY AUTOINCREMENT,
238
- source_memory_id INTEGER NOT NULL,
239
- target_memory_id INTEGER NOT NULL,
240
- similarity REAL NOT NULL,
241
- relationship_type TEXT,
242
- shared_entities TEXT DEFAULT '[]',
243
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
244
- FOREIGN KEY (source_memory_id) REFERENCES memories(id),
245
- FOREIGN KEY (target_memory_id) REFERENCES memories(id),
246
- UNIQUE(source_memory_id, target_memory_id)
247
- )
248
- ''')
249
-
250
- cursor.execute('''
251
- CREATE TABLE IF NOT EXISTS graph_clusters (
252
- id INTEGER PRIMARY KEY AUTOINCREMENT,
253
- cluster_name TEXT,
254
- name TEXT,
255
- description TEXT,
256
- summary TEXT,
257
- memory_count INTEGER DEFAULT 0,
258
- member_count INTEGER DEFAULT 0,
259
- avg_importance REAL DEFAULT 5.0,
260
- top_entities TEXT DEFAULT '[]',
261
- parent_cluster_id INTEGER,
262
- depth INTEGER DEFAULT 0,
263
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
264
- updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
265
- FOREIGN KEY (parent_cluster_id) REFERENCES graph_clusters(id) ON DELETE SET NULL
266
- )
267
- ''')
268
-
269
- # Create pattern learning tables
270
- cursor.execute('''
271
- CREATE TABLE IF NOT EXISTS identity_patterns (
272
- id INTEGER PRIMARY KEY AUTOINCREMENT,
273
- pattern_type TEXT NOT NULL,
274
- pattern_key TEXT NOT NULL,
275
- pattern_value TEXT,
276
- confidence REAL DEFAULT 0.0,
277
- frequency INTEGER DEFAULT 1,
278
- last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
279
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
280
- UNIQUE(pattern_type, pattern_key)
281
- )
282
- ''')
283
-
284
- cursor.execute('''
285
- CREATE TABLE IF NOT EXISTS pattern_examples (
286
- id INTEGER PRIMARY KEY AUTOINCREMENT,
287
- pattern_id INTEGER NOT NULL,
288
- memory_id INTEGER NOT NULL,
289
- context TEXT,
290
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
291
- FOREIGN KEY (pattern_id) REFERENCES identity_patterns(id),
292
- FOREIGN KEY (memory_id) REFERENCES memories(id)
293
- )
294
- ''')
295
-
296
- # Create tree table
297
- cursor.execute('''
298
- CREATE TABLE IF NOT EXISTS memory_tree (
299
- id INTEGER PRIMARY KEY AUTOINCREMENT,
300
- node_type TEXT NOT NULL,
301
- name TEXT NOT NULL,
302
- parent_id INTEGER,
303
- tree_path TEXT DEFAULT '/',
304
- depth INTEGER DEFAULT 0,
305
- memory_count INTEGER DEFAULT 0,
306
- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
307
- FOREIGN KEY (parent_id) REFERENCES memory_tree(id)
308
- )
309
- ''')
310
-
311
- # Create archive table
312
- cursor.execute('''
313
- CREATE TABLE IF NOT EXISTS memory_archive (
314
- id INTEGER PRIMARY KEY AUTOINCREMENT,
315
- original_memory_id INTEGER,
316
- compressed_content TEXT NOT NULL,
317
- compression_type TEXT DEFAULT 'tier2',
318
- original_size INTEGER,
319
- compressed_size INTEGER,
320
- archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
321
- )
322
- ''')
323
-
324
- # Create system metadata table for watermarking
325
- cursor.execute('''
326
- CREATE TABLE IF NOT EXISTS system_metadata (
327
- key TEXT PRIMARY KEY,
328
- value TEXT NOT NULL
329
- )
330
- ''')
331
-
332
- # Add system watermark
333
- cursor.execute('''
334
- INSERT OR REPLACE INTO system_metadata (key, value) VALUES
335
- ('product', 'SuperLocalMemory'),
336
- ('website', 'https://superlocalmemory.com'),
337
- ('repository', 'https://github.com/varun369/SuperLocalMemoryV2'),
338
- ('license', 'MIT'),
339
- ('schema_version', '2.0.0')
340
- ''')
341
-
342
- # Create indexes for performance
343
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project_name)')
344
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category)')
345
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_cluster ON memories(cluster_id)')
346
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(content_hash)')
347
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source_memory_id)')
348
- cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target_memory_id)')
349
-
350
- conn.commit()
351
- conn.close()
195
+ try:
196
+ cursor = conn.cursor()
197
+
198
+ # Create memories table (core)
199
+ cursor.execute('''
200
+ CREATE TABLE IF NOT EXISTS memories (
201
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
202
+ content TEXT NOT NULL,
203
+ summary TEXT,
204
+ project_path TEXT,
205
+ project_name TEXT,
206
+ tags TEXT DEFAULT '[]',
207
+ category TEXT,
208
+ parent_id INTEGER,
209
+ tree_path TEXT DEFAULT '/',
210
+ depth INTEGER DEFAULT 0,
211
+ memory_type TEXT DEFAULT 'session',
212
+ importance INTEGER DEFAULT 5,
213
+ content_hash TEXT,
214
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
215
+ last_accessed TIMESTAMP,
216
+ access_count INTEGER DEFAULT 0,
217
+ compressed_at TIMESTAMP,
218
+ tier INTEGER DEFAULT 1,
219
+ cluster_id INTEGER,
220
+ FOREIGN KEY (parent_id) REFERENCES memories(id)
221
+ )
222
+ ''')
223
+
224
+ # Create graph tables
225
+ cursor.execute('''
226
+ CREATE TABLE IF NOT EXISTS graph_nodes (
227
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
228
+ memory_id INTEGER UNIQUE NOT NULL,
229
+ entities TEXT DEFAULT '[]',
230
+ embedding_vector BLOB,
231
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
232
+ FOREIGN KEY (memory_id) REFERENCES memories(id)
233
+ )
234
+ ''')
235
+
236
+ cursor.execute('''
237
+ CREATE TABLE IF NOT EXISTS graph_edges (
238
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
239
+ source_memory_id INTEGER NOT NULL,
240
+ target_memory_id INTEGER NOT NULL,
241
+ similarity REAL NOT NULL,
242
+ relationship_type TEXT,
243
+ shared_entities TEXT DEFAULT '[]',
244
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
245
+ FOREIGN KEY (source_memory_id) REFERENCES memories(id),
246
+ FOREIGN KEY (target_memory_id) REFERENCES memories(id),
247
+ UNIQUE(source_memory_id, target_memory_id)
248
+ )
249
+ ''')
250
+
251
+ cursor.execute('''
252
+ CREATE TABLE IF NOT EXISTS graph_clusters (
253
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
254
+ cluster_name TEXT,
255
+ name TEXT,
256
+ description TEXT,
257
+ summary TEXT,
258
+ memory_count INTEGER DEFAULT 0,
259
+ member_count INTEGER DEFAULT 0,
260
+ avg_importance REAL DEFAULT 5.0,
261
+ top_entities TEXT DEFAULT '[]',
262
+ parent_cluster_id INTEGER,
263
+ depth INTEGER DEFAULT 0,
264
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
265
+ updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
266
+ FOREIGN KEY (parent_cluster_id) REFERENCES graph_clusters(id) ON DELETE SET NULL
267
+ )
268
+ ''')
269
+
270
+ # Create pattern learning tables
271
+ cursor.execute('''
272
+ CREATE TABLE IF NOT EXISTS identity_patterns (
273
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
274
+ pattern_type TEXT NOT NULL,
275
+ pattern_key TEXT NOT NULL,
276
+ pattern_value TEXT,
277
+ confidence REAL DEFAULT 0.0,
278
+ frequency INTEGER DEFAULT 1,
279
+ last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
280
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
281
+ UNIQUE(pattern_type, pattern_key)
282
+ )
283
+ ''')
284
+
285
+ cursor.execute('''
286
+ CREATE TABLE IF NOT EXISTS pattern_examples (
287
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
288
+ pattern_id INTEGER NOT NULL,
289
+ memory_id INTEGER NOT NULL,
290
+ context TEXT,
291
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
292
+ FOREIGN KEY (pattern_id) REFERENCES identity_patterns(id),
293
+ FOREIGN KEY (memory_id) REFERENCES memories(id)
294
+ )
295
+ ''')
296
+
297
+ # Create tree table
298
+ cursor.execute('''
299
+ CREATE TABLE IF NOT EXISTS memory_tree (
300
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
301
+ node_type TEXT NOT NULL,
302
+ name TEXT NOT NULL,
303
+ parent_id INTEGER,
304
+ tree_path TEXT DEFAULT '/',
305
+ depth INTEGER DEFAULT 0,
306
+ memory_count INTEGER DEFAULT 0,
307
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
308
+ FOREIGN KEY (parent_id) REFERENCES memory_tree(id)
309
+ )
310
+ ''')
311
+
312
+ # Create archive table
313
+ cursor.execute('''
314
+ CREATE TABLE IF NOT EXISTS memory_archive (
315
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
316
+ original_memory_id INTEGER,
317
+ compressed_content TEXT NOT NULL,
318
+ compression_type TEXT DEFAULT 'tier2',
319
+ original_size INTEGER,
320
+ compressed_size INTEGER,
321
+ archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
322
+ )
323
+ ''')
324
+
325
+ # Create system metadata table for watermarking
326
+ cursor.execute('''
327
+ CREATE TABLE IF NOT EXISTS system_metadata (
328
+ key TEXT PRIMARY KEY,
329
+ value TEXT NOT NULL
330
+ )
331
+ ''')
332
+
333
+ # Add system watermark
334
+ cursor.execute('''
335
+ INSERT OR REPLACE INTO system_metadata (key, value) VALUES
336
+ ('product', 'SuperLocalMemory'),
337
+ ('website', 'https://superlocalmemory.com'),
338
+ ('repository', 'https://github.com/varun369/SuperLocalMemoryV2'),
339
+ ('license', 'MIT'),
340
+ ('schema_version', '2.0.0')
341
+ ''')
342
+
343
+ # Create indexes for performance
344
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project_name)')
345
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category)')
346
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_cluster ON memories(cluster_id)')
347
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(content_hash)')
348
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source_memory_id)')
349
+ cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target_memory_id)')
350
+
351
+ conn.commit()
352
+ finally:
353
+ conn.close()
352
354
 
353
355
  return True, "Database initialized successfully"
354
356
 
@@ -425,9 +427,9 @@ def validate_setup(auto_fix: bool = False) -> bool:
425
427
  print("\n✓ All required checks passed!")
426
428
  print("\nQuick Start Commands:")
427
429
  print(" 1. Add a memory:")
428
- print(" superlocalmemoryv2:remember 'Your content here'")
430
+ print(" superlocalmemoryv2-remember 'Your content here'")
429
431
  print("\n 2. Search memories:")
430
- print(" superlocalmemoryv2:recall 'search query'")
432
+ print(" superlocalmemoryv2-recall 'search query'")
431
433
  print("\n 3. Build knowledge graph (after adding 2+ memories):")
432
434
  print(" python ~/.claude-memory/graph_engine.py build")
433
435
  print("\n 4. Start UI server:")