superlocalmemory 2.8.1 → 2.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ATTRIBUTION.md +50 -0
- package/CHANGELOG.md +8 -0
- package/README.md +31 -20
- package/api_server.py +5 -0
- package/bin/aider-smart +2 -2
- package/bin/slm +18 -18
- package/bin/slm.bat +3 -3
- package/configs/continue-skills.yaml +4 -4
- package/docs/ARCHITECTURE.md +3 -3
- package/docs/CLI-COMMANDS-REFERENCE.md +18 -18
- package/docs/FRAMEWORK-INTEGRATIONS.md +4 -4
- package/docs/SECURITY-QUICK-REFERENCE.md +214 -0
- package/docs/UNIVERSAL-INTEGRATION.md +15 -15
- package/install.ps1 +11 -11
- package/install.sh +4 -4
- package/mcp_server.py +4 -4
- package/package.json +5 -3
- package/requirements-core.txt +16 -18
- package/requirements-learning.txt +8 -8
- package/requirements.txt +9 -7
- package/scripts/prepack.js +33 -0
- package/scripts/verify-v27.ps1 +301 -0
- package/src/agent_registry.py +32 -28
- package/src/auto_backup.py +12 -6
- package/src/cache_manager.py +2 -2
- package/src/compression/__init__.py +25 -0
- package/src/compression/cli.py +150 -0
- package/src/compression/cold_storage.py +217 -0
- package/src/compression/config.py +72 -0
- package/src/compression/orchestrator.py +133 -0
- package/src/compression/tier2_compressor.py +228 -0
- package/src/compression/tier3_compressor.py +153 -0
- package/src/compression/tier_classifier.py +148 -0
- package/src/db_connection_manager.py +5 -5
- package/src/event_bus.py +24 -22
- package/src/graph/graph_core.py +3 -3
- package/src/hnsw_index.py +3 -3
- package/src/learning/__init__.py +5 -4
- package/src/learning/adaptive_ranker.py +14 -265
- package/src/learning/bootstrap/__init__.py +69 -0
- package/src/learning/bootstrap/constants.py +93 -0
- package/src/learning/bootstrap/db_queries.py +316 -0
- package/src/learning/bootstrap/sampling.py +82 -0
- package/src/learning/bootstrap/text_utils.py +71 -0
- package/src/learning/cross_project_aggregator.py +58 -57
- package/src/learning/db/__init__.py +40 -0
- package/src/learning/db/constants.py +44 -0
- package/src/learning/db/schema.py +279 -0
- package/src/learning/learning_db.py +15 -234
- package/src/learning/ranking/__init__.py +33 -0
- package/src/learning/ranking/constants.py +84 -0
- package/src/learning/ranking/helpers.py +278 -0
- package/src/learning/source_quality_scorer.py +66 -65
- package/src/learning/synthetic_bootstrap.py +28 -310
- package/src/memory/__init__.py +36 -0
- package/src/memory/cli.py +205 -0
- package/src/memory/constants.py +39 -0
- package/src/memory/helpers.py +28 -0
- package/src/memory/schema.py +166 -0
- package/src/memory-profiles.py +94 -86
- package/src/memory-reset.py +187 -185
- package/src/memory_compression.py +2 -2
- package/src/memory_store_v2.py +40 -355
- package/src/migrate_v1_to_v2.py +11 -10
- package/src/patterns/analyzers.py +104 -100
- package/src/patterns/learner.py +17 -13
- package/src/patterns/scoring.py +25 -21
- package/src/patterns/store.py +40 -38
- package/src/patterns/terminology.py +53 -51
- package/src/provenance_tracker.py +2 -2
- package/src/qualixar_attribution.py +139 -0
- package/src/qualixar_watermark.py +78 -0
- package/src/search/engine.py +16 -14
- package/src/search/index_loader.py +13 -11
- package/src/setup_validator.py +162 -160
- package/src/subscription_manager.py +20 -18
- package/src/tree/builder.py +66 -64
- package/src/tree/nodes.py +103 -97
- package/src/tree/queries.py +142 -137
- package/src/tree/schema.py +46 -42
- package/src/webhook_dispatcher.py +3 -3
- package/ui_server.py +7 -4
- /package/bin/{superlocalmemoryv2:learning → superlocalmemoryv2-learning} +0 -0
- /package/bin/{superlocalmemoryv2:list → superlocalmemoryv2-list} +0 -0
- /package/bin/{superlocalmemoryv2:patterns → superlocalmemoryv2-patterns} +0 -0
- /package/bin/{superlocalmemoryv2:profile → superlocalmemoryv2-profile} +0 -0
- /package/bin/{superlocalmemoryv2:recall → superlocalmemoryv2-recall} +0 -0
- /package/bin/{superlocalmemoryv2:remember → superlocalmemoryv2-remember} +0 -0
- /package/bin/{superlocalmemoryv2:reset → superlocalmemoryv2-reset} +0 -0
- /package/bin/{superlocalmemoryv2:status → superlocalmemoryv2-status} +0 -0
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
4
|
+
# Part of Qualixar — Advancing Agent Development Through Research
|
|
5
|
+
"""
|
|
6
|
+
Qualixar Attribution — Cryptographic output signing for provenance tracking.
|
|
7
|
+
|
|
8
|
+
Signs tool outputs with SHA-256 content hashes and verifiable provenance
|
|
9
|
+
metadata. Part of the 3-layer Qualixar attribution system:
|
|
10
|
+
Layer 1: Visible attribution (ATTRIBUTION.md, get_attribution())
|
|
11
|
+
Layer 2: Cryptographic signing (this module)
|
|
12
|
+
Layer 3: Steganographic watermarking (qualixar_watermark.py)
|
|
13
|
+
"""
|
|
14
|
+
|
|
15
|
+
import hashlib
|
|
16
|
+
import json
|
|
17
|
+
import time
|
|
18
|
+
from typing import Any, Dict
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# Registry of all Qualixar research initiative tools
TOOL_REGISTRY: Dict[str, str] = {
    "agentassert": "AgentAssert — Behavioral Contracts",
    "agentassay": "AgentAssay — Stochastic Testing",
    "skillfortify": "SkillFortify — Security Validation",
    "superlocalmemory": "SuperLocalMemory — Agent Memory",
    "telephonebench": "TelephoneBench — Communication Benchmarks",
    "vibecheck": "VibeCheck — Code Reliability",
    "agentreplay": "AgentReplay — Time-Travel Debugging",
    "agentchaos": "AgentChaos — Chaos Engineering",
    "agentmigrate": "AgentMigrate — Migration Engineering",
    "agentpact": "AgentPact — Composition Testing",
}


class QualixarSigner:
    """Signs tool outputs with cryptographic provenance metadata.

    Adds a ``_qualixar`` block to output dictionaries containing a SHA-256
    content hash, timestamp, tool information, and a verifiable signature.

    Args:
        tool_name: Identifier for the tool (e.g. ``"superlocalmemory"``).
        version: Semantic version string of the tool.

    Example::

        signer = QualixarSigner("superlocalmemory", "2.8.3")
        signed = signer.sign({"memories": [...]})
        assert QualixarSigner.verify(signed) is True
    """

    def __init__(self, tool_name: str, version: str) -> None:
        self.tool_name = tool_name
        self.version = version
        # Fall back to the raw name for tools not in the registry.
        self.tool_desc = TOOL_REGISTRY.get(tool_name, tool_name)

    def sign(self, output_data: Any) -> Dict[str, Any]:
        """Add cryptographic provenance to any output.

        Args:
            output_data: The data to sign. If a dict, provenance is added
                in-place. Otherwise wrapped in ``{"data": ...}``.

        Returns:
            Dictionary with the original data plus a ``_qualixar`` provenance
            block containing content_hash, timestamp, and signature.
        """
        # Wrap non-dict payloads FIRST so the content hash covers exactly
        # the fields verify() will later re-hash (everything except the
        # "_qualixar" key). Hashing the raw value and wrapping afterwards
        # made verify() always fail for non-dict inputs.
        if isinstance(output_data, dict):
            payload = output_data  # provenance is added in place
        else:
            payload = {"data": output_data}

        timestamp = time.time()
        # sort_keys gives a canonical serialization; default=str keeps
        # non-JSON-native values (datetimes, Paths, ...) hashable.
        canonical = json.dumps(payload, sort_keys=True, default=str)
        content_hash = hashlib.sha256(canonical.encode()).hexdigest()

        payload["_qualixar"] = {
            "tool": self.tool_name,
            "tool_description": self.tool_desc,
            "version": self.version,
            "platform": "Qualixar",
            "timestamp": timestamp,
            "content_hash": content_hash,
            "license": "MIT",
            "attribution": (
                f"Generated by {self.tool_desc} v{self.version}"
            ),
            "signature": self._compute_signature(
                content_hash, timestamp
            ),
        }
        return payload

    def _compute_signature(
        self, content_hash: str, timestamp: float
    ) -> str:
        """Compute HMAC-style signature over content hash and timestamp.

        In production, this should use Ed25519 with a private key.
        The current implementation uses SHA-256 over a canonical string
        as a lightweight integrity check.

        Args:
            content_hash: SHA-256 hex digest of the canonical content.
            timestamp: Unix timestamp of signing.

        Returns:
            Hex-encoded SHA-256 signature string.
        """
        sig_input = (
            f"{self.tool_name}:{self.version}:"
            f"{content_hash}:{timestamp}"
        )
        return hashlib.sha256(sig_input.encode()).hexdigest()

    @staticmethod
    def verify(output_data: Dict[str, Any]) -> bool:
        """Verify an output's provenance signature.

        Re-computes the content hash from the non-provenance fields and
        compares it against the hash stored in the ``_qualixar`` block.

        Args:
            output_data: A dictionary previously signed with :meth:`sign`.

        Returns:
            ``True`` if the content hash matches, ``False`` otherwise.
        """
        prov = output_data.get("_qualixar", {})
        if not prov:
            return False
        # Re-hash everything except the provenance block itself; this
        # mirrors the canonicalization performed by sign().
        content = {
            k: v for k, v in output_data.items() if k != "_qualixar"
        }
        canonical = json.dumps(content, sort_keys=True, default=str)
        expected_hash = hashlib.sha256(canonical.encode()).hexdigest()
        return prov.get("content_hash") == expected_hash
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# SPDX-License-Identifier: MIT
|
|
3
|
+
# Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
|
|
4
|
+
# Part of Qualixar — Advancing Agent Development Through Research
|
|
5
|
+
"""
|
|
6
|
+
Qualixar Watermark — Steganographic attribution for text outputs.
|
|
7
|
+
|
|
8
|
+
Embeds invisible zero-width Unicode characters in text to encode a tool
|
|
9
|
+
identifier. The watermark is invisible to human readers but can be
|
|
10
|
+
extracted programmatically to verify provenance.
|
|
11
|
+
|
|
12
|
+
Part of the 3-layer Qualixar attribution system:
|
|
13
|
+
Layer 1: Visible attribution (ATTRIBUTION.md, get_attribution())
|
|
14
|
+
Layer 2: Cryptographic signing (qualixar_attribution.py)
|
|
15
|
+
Layer 3: Steganographic watermarking (this module)
|
|
16
|
+
|
|
17
|
+
No external dependencies required.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
# Zero-width characters for binary encoding
ZW_SPACE = '\u200b'   # zero-width space  -> bit 0
ZW_JOINER = '\u200d'  # zero-width joiner -> bit 1
ZW_SEP = '\ufeff'     # byte order mark   -> payload separator


def encode_watermark(text: str, tool_id: str) -> str:
    """Embed an invisible watermark in text output.

    Converts ``tool_id`` to binary and encodes each bit as a zero-width
    Unicode character. The watermark is inserted after the first paragraph
    break (``\\n\\n``) so it remains invisible to human readers.

    Args:
        text: The text to watermark.
        tool_id: Short identifier to embed (e.g. ``"slm"``).

    Returns:
        The original text with the invisible watermark inserted.
    """
    # 8 bits per character, most significant bit first.
    bits = ''.join(format(ord(ch), '08b') for ch in tool_id)
    payload = ''.join(ZW_JOINER if bit == '1' else ZW_SPACE for bit in bits)
    mark = ZW_SEP + payload + ZW_SEP

    # Hide the mark right after the first paragraph break so it causes
    # no visible layout change; append at the end if there is none.
    brk = text.find('\n\n')
    if brk == -1:
        return text + mark
    cut = brk + 2
    return text[:cut] + mark + text[cut:]
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def decode_watermark(text: str) -> str:
    """Extract a hidden watermark from text.

    Locates the zero-width separator characters and decodes the binary
    payload between them back into the original tool identifier string.

    Args:
        text: Text that may contain a watermark.

    Returns:
        The decoded tool identifier, or an empty string if no watermark
        is found.
    """
    # Literal zero-width characters (same values as the module-level
    # ZW_SEP / ZW_SPACE constants): U+FEFF separator, U+200B = bit 0.
    sep = '\ufeff'
    zero = '\u200b'

    first = text.find(sep)
    second = text.find(sep, first + 1) if first != -1 else -1
    if second == -1:
        # Missing either separator -> no (complete) watermark present.
        return ""

    # Any non-ZW_SPACE character between the separators decodes as a 1,
    # matching the encoder's two-symbol alphabet.
    bits = ''.join('0' if ch == zero else '1' for ch in text[first + 1:second])

    decoded = []
    for i in range(0, len(bits), 8):
        byte = bits[i:i + 8]
        if len(byte) == 8:  # silently drop a trailing partial byte
            decoded.append(chr(int(byte, 2)))
    return ''.join(decoded)
|
package/src/search/engine.py
CHANGED
|
@@ -172,20 +172,22 @@ class HybridSearchEngine(IndexLoaderMixin, SearchMethodsMixin, FusionMixin):
|
|
|
172
172
|
id_to_score = {mem_id: score for mem_id, score in raw_results}
|
|
173
173
|
|
|
174
174
|
conn = sqlite3.connect(self.db_path)
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
175
|
+
try:
|
|
176
|
+
cursor = conn.cursor()
|
|
177
|
+
|
|
178
|
+
# Fetch memories
|
|
179
|
+
placeholders = ','.join(['?'] * len(memory_ids))
|
|
180
|
+
cursor.execute(f'''
|
|
181
|
+
SELECT id, content, summary, project_path, project_name, tags,
|
|
182
|
+
category, parent_id, tree_path, depth, memory_type,
|
|
183
|
+
importance, created_at, cluster_id, last_accessed, access_count
|
|
184
|
+
FROM memories
|
|
185
|
+
WHERE id IN ({placeholders})
|
|
186
|
+
''', memory_ids)
|
|
187
|
+
|
|
188
|
+
rows = cursor.fetchall()
|
|
189
|
+
finally:
|
|
190
|
+
conn.close()
|
|
189
191
|
|
|
190
192
|
# Build result dictionaries
|
|
191
193
|
results = []
|
|
@@ -31,17 +31,19 @@ class IndexLoaderMixin:
|
|
|
31
31
|
Load documents from database and build search indexes.
|
|
32
32
|
"""
|
|
33
33
|
conn = sqlite3.connect(self.db_path)
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
34
|
+
try:
|
|
35
|
+
cursor = conn.cursor()
|
|
36
|
+
|
|
37
|
+
# Fetch all memories
|
|
38
|
+
cursor.execute('''
|
|
39
|
+
SELECT id, content, summary, tags
|
|
40
|
+
FROM memories
|
|
41
|
+
ORDER BY id
|
|
42
|
+
''')
|
|
43
|
+
|
|
44
|
+
rows = cursor.fetchall()
|
|
45
|
+
finally:
|
|
46
|
+
conn.close()
|
|
45
47
|
|
|
46
48
|
if not rows:
|
|
47
49
|
return
|
package/src/setup_validator.py
CHANGED
|
@@ -144,7 +144,7 @@ def check_database() -> Tuple[bool, str, List[str]]:
|
|
|
144
144
|
try:
|
|
145
145
|
cursor.execute("SELECT COUNT(*) FROM memories")
|
|
146
146
|
memory_count = cursor.fetchone()[0]
|
|
147
|
-
except:
|
|
147
|
+
except Exception:
|
|
148
148
|
memory_count = 0
|
|
149
149
|
|
|
150
150
|
conn.close()
|
|
@@ -192,163 +192,165 @@ def initialize_database() -> Tuple[bool, str]:
|
|
|
192
192
|
MEMORY_DIR.mkdir(parents=True, exist_ok=True)
|
|
193
193
|
|
|
194
194
|
conn = sqlite3.connect(DB_PATH)
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
|
|
338
|
-
|
|
339
|
-
|
|
340
|
-
|
|
341
|
-
|
|
342
|
-
|
|
343
|
-
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
|
|
351
|
-
|
|
195
|
+
try:
|
|
196
|
+
cursor = conn.cursor()
|
|
197
|
+
|
|
198
|
+
# Create memories table (core)
|
|
199
|
+
cursor.execute('''
|
|
200
|
+
CREATE TABLE IF NOT EXISTS memories (
|
|
201
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
202
|
+
content TEXT NOT NULL,
|
|
203
|
+
summary TEXT,
|
|
204
|
+
project_path TEXT,
|
|
205
|
+
project_name TEXT,
|
|
206
|
+
tags TEXT DEFAULT '[]',
|
|
207
|
+
category TEXT,
|
|
208
|
+
parent_id INTEGER,
|
|
209
|
+
tree_path TEXT DEFAULT '/',
|
|
210
|
+
depth INTEGER DEFAULT 0,
|
|
211
|
+
memory_type TEXT DEFAULT 'session',
|
|
212
|
+
importance INTEGER DEFAULT 5,
|
|
213
|
+
content_hash TEXT,
|
|
214
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
215
|
+
last_accessed TIMESTAMP,
|
|
216
|
+
access_count INTEGER DEFAULT 0,
|
|
217
|
+
compressed_at TIMESTAMP,
|
|
218
|
+
tier INTEGER DEFAULT 1,
|
|
219
|
+
cluster_id INTEGER,
|
|
220
|
+
FOREIGN KEY (parent_id) REFERENCES memories(id)
|
|
221
|
+
)
|
|
222
|
+
''')
|
|
223
|
+
|
|
224
|
+
# Create graph tables
|
|
225
|
+
cursor.execute('''
|
|
226
|
+
CREATE TABLE IF NOT EXISTS graph_nodes (
|
|
227
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
228
|
+
memory_id INTEGER UNIQUE NOT NULL,
|
|
229
|
+
entities TEXT DEFAULT '[]',
|
|
230
|
+
embedding_vector BLOB,
|
|
231
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
232
|
+
FOREIGN KEY (memory_id) REFERENCES memories(id)
|
|
233
|
+
)
|
|
234
|
+
''')
|
|
235
|
+
|
|
236
|
+
cursor.execute('''
|
|
237
|
+
CREATE TABLE IF NOT EXISTS graph_edges (
|
|
238
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
239
|
+
source_memory_id INTEGER NOT NULL,
|
|
240
|
+
target_memory_id INTEGER NOT NULL,
|
|
241
|
+
similarity REAL NOT NULL,
|
|
242
|
+
relationship_type TEXT,
|
|
243
|
+
shared_entities TEXT DEFAULT '[]',
|
|
244
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
245
|
+
FOREIGN KEY (source_memory_id) REFERENCES memories(id),
|
|
246
|
+
FOREIGN KEY (target_memory_id) REFERENCES memories(id),
|
|
247
|
+
UNIQUE(source_memory_id, target_memory_id)
|
|
248
|
+
)
|
|
249
|
+
''')
|
|
250
|
+
|
|
251
|
+
cursor.execute('''
|
|
252
|
+
CREATE TABLE IF NOT EXISTS graph_clusters (
|
|
253
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
254
|
+
cluster_name TEXT,
|
|
255
|
+
name TEXT,
|
|
256
|
+
description TEXT,
|
|
257
|
+
summary TEXT,
|
|
258
|
+
memory_count INTEGER DEFAULT 0,
|
|
259
|
+
member_count INTEGER DEFAULT 0,
|
|
260
|
+
avg_importance REAL DEFAULT 5.0,
|
|
261
|
+
top_entities TEXT DEFAULT '[]',
|
|
262
|
+
parent_cluster_id INTEGER,
|
|
263
|
+
depth INTEGER DEFAULT 0,
|
|
264
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
265
|
+
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
266
|
+
FOREIGN KEY (parent_cluster_id) REFERENCES graph_clusters(id) ON DELETE SET NULL
|
|
267
|
+
)
|
|
268
|
+
''')
|
|
269
|
+
|
|
270
|
+
# Create pattern learning tables
|
|
271
|
+
cursor.execute('''
|
|
272
|
+
CREATE TABLE IF NOT EXISTS identity_patterns (
|
|
273
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
274
|
+
pattern_type TEXT NOT NULL,
|
|
275
|
+
pattern_key TEXT NOT NULL,
|
|
276
|
+
pattern_value TEXT,
|
|
277
|
+
confidence REAL DEFAULT 0.0,
|
|
278
|
+
frequency INTEGER DEFAULT 1,
|
|
279
|
+
last_seen TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
280
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
281
|
+
UNIQUE(pattern_type, pattern_key)
|
|
282
|
+
)
|
|
283
|
+
''')
|
|
284
|
+
|
|
285
|
+
cursor.execute('''
|
|
286
|
+
CREATE TABLE IF NOT EXISTS pattern_examples (
|
|
287
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
288
|
+
pattern_id INTEGER NOT NULL,
|
|
289
|
+
memory_id INTEGER NOT NULL,
|
|
290
|
+
context TEXT,
|
|
291
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
292
|
+
FOREIGN KEY (pattern_id) REFERENCES identity_patterns(id),
|
|
293
|
+
FOREIGN KEY (memory_id) REFERENCES memories(id)
|
|
294
|
+
)
|
|
295
|
+
''')
|
|
296
|
+
|
|
297
|
+
# Create tree table
|
|
298
|
+
cursor.execute('''
|
|
299
|
+
CREATE TABLE IF NOT EXISTS memory_tree (
|
|
300
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
301
|
+
node_type TEXT NOT NULL,
|
|
302
|
+
name TEXT NOT NULL,
|
|
303
|
+
parent_id INTEGER,
|
|
304
|
+
tree_path TEXT DEFAULT '/',
|
|
305
|
+
depth INTEGER DEFAULT 0,
|
|
306
|
+
memory_count INTEGER DEFAULT 0,
|
|
307
|
+
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
|
308
|
+
FOREIGN KEY (parent_id) REFERENCES memory_tree(id)
|
|
309
|
+
)
|
|
310
|
+
''')
|
|
311
|
+
|
|
312
|
+
# Create archive table
|
|
313
|
+
cursor.execute('''
|
|
314
|
+
CREATE TABLE IF NOT EXISTS memory_archive (
|
|
315
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
316
|
+
original_memory_id INTEGER,
|
|
317
|
+
compressed_content TEXT NOT NULL,
|
|
318
|
+
compression_type TEXT DEFAULT 'tier2',
|
|
319
|
+
original_size INTEGER,
|
|
320
|
+
compressed_size INTEGER,
|
|
321
|
+
archived_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
|
322
|
+
)
|
|
323
|
+
''')
|
|
324
|
+
|
|
325
|
+
# Create system metadata table for watermarking
|
|
326
|
+
cursor.execute('''
|
|
327
|
+
CREATE TABLE IF NOT EXISTS system_metadata (
|
|
328
|
+
key TEXT PRIMARY KEY,
|
|
329
|
+
value TEXT NOT NULL
|
|
330
|
+
)
|
|
331
|
+
''')
|
|
332
|
+
|
|
333
|
+
# Add system watermark
|
|
334
|
+
cursor.execute('''
|
|
335
|
+
INSERT OR REPLACE INTO system_metadata (key, value) VALUES
|
|
336
|
+
('product', 'SuperLocalMemory'),
|
|
337
|
+
('website', 'https://superlocalmemory.com'),
|
|
338
|
+
('repository', 'https://github.com/varun369/SuperLocalMemoryV2'),
|
|
339
|
+
('license', 'MIT'),
|
|
340
|
+
('schema_version', '2.0.0')
|
|
341
|
+
''')
|
|
342
|
+
|
|
343
|
+
# Create indexes for performance
|
|
344
|
+
cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_project ON memories(project_name)')
|
|
345
|
+
cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_category ON memories(category)')
|
|
346
|
+
cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_cluster ON memories(cluster_id)')
|
|
347
|
+
cursor.execute('CREATE INDEX IF NOT EXISTS idx_memories_hash ON memories(content_hash)')
|
|
348
|
+
cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges(source_memory_id)')
|
|
349
|
+
cursor.execute('CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges(target_memory_id)')
|
|
350
|
+
|
|
351
|
+
conn.commit()
|
|
352
|
+
finally:
|
|
353
|
+
conn.close()
|
|
352
354
|
|
|
353
355
|
return True, "Database initialized successfully"
|
|
354
356
|
|
|
@@ -425,9 +427,9 @@ def validate_setup(auto_fix: bool = False) -> bool:
|
|
|
425
427
|
print("\n✓ All required checks passed!")
|
|
426
428
|
print("\nQuick Start Commands:")
|
|
427
429
|
print(" 1. Add a memory:")
|
|
428
|
-
print(" superlocalmemoryv2
|
|
430
|
+
print(" superlocalmemoryv2-remember 'Your content here'")
|
|
429
431
|
print("\n 2. Search memories:")
|
|
430
|
-
print(" superlocalmemoryv2
|
|
432
|
+
print(" superlocalmemoryv2-recall 'search query'")
|
|
431
433
|
print("\n 3. Build knowledge graph (after adding 2+ memories):")
|
|
432
434
|
print(" python ~/.claude-memory/graph_engine.py build")
|
|
433
435
|
print("\n 4. Start UI server:")
|