superlocalmemory 2.8.0 → 2.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/ATTRIBUTION.md +50 -0
  2. package/CHANGELOG.md +8 -0
  3. package/README.md +24 -15
  4. package/api_server.py +23 -0
  5. package/bin/aider-smart +2 -2
  6. package/bin/slm +35 -15
  7. package/configs/continue-skills.yaml +4 -4
  8. package/docs/ARCHITECTURE.md +3 -3
  9. package/docs/CLI-COMMANDS-REFERENCE.md +18 -18
  10. package/docs/FRAMEWORK-INTEGRATIONS.md +4 -4
  11. package/docs/UNIVERSAL-INTEGRATION.md +15 -15
  12. package/install.sh +19 -4
  13. package/mcp_server.py +39 -4
  14. package/package.json +4 -2
  15. package/skills/slm-list-recent/SKILL.md +1 -1
  16. package/skills/slm-remember/SKILL.md +1 -1
  17. package/skills/slm-status/SKILL.md +1 -1
  18. package/skills/slm-switch-profile/SKILL.md +3 -3
  19. package/src/graph/graph_core.py +3 -3
  20. package/src/hnsw_index.py +10 -4
  21. package/src/lifecycle/lifecycle_engine.py +61 -8
  22. package/src/lifecycle/lifecycle_evaluator.py +38 -6
  23. package/src/mcp_tools_v28.py +4 -3
  24. package/src/memory-profiles.py +1 -0
  25. package/src/memory_store_v2.py +6 -1
  26. package/src/qualixar_attribution.py +139 -0
  27. package/src/qualixar_watermark.py +78 -0
  28. package/src/setup_validator.py +2 -2
  29. package/ui/index.html +152 -4
  30. package/ui/js/behavioral.js +276 -0
  31. package/ui/js/compliance.js +252 -0
  32. package/ui/js/init.js +10 -0
  33. package/ui/js/lifecycle.js +298 -0
  34. package/ui/js/profiles.js +4 -0
  35. package/ui_server.py +19 -0
  36. /package/bin/{superlocalmemoryv2:learning → superlocalmemoryv2-learning} +0 -0
  37. /package/bin/{superlocalmemoryv2:list → superlocalmemoryv2-list} +0 -0
  38. /package/bin/{superlocalmemoryv2:patterns → superlocalmemoryv2-patterns} +0 -0
  39. /package/bin/{superlocalmemoryv2:profile → superlocalmemoryv2-profile} +0 -0
  40. /package/bin/{superlocalmemoryv2:recall → superlocalmemoryv2-recall} +0 -0
  41. /package/bin/{superlocalmemoryv2:remember → superlocalmemoryv2-remember} +0 -0
  42. /package/bin/{superlocalmemoryv2:reset → superlocalmemoryv2-reset} +0 -0
  43. /package/bin/{superlocalmemoryv2:status → superlocalmemoryv2-status} +0 -0
package/mcp_server.py CHANGED
@@ -560,6 +560,36 @@ def _maybe_passive_decay() -> None:
560
560
  pass
561
561
 
562
562
 
563
+ # ============================================================================
564
+ # Eager initialization — ensure schema migration runs at startup (v2.8)
565
+ # ============================================================================
566
+
567
+ def _eager_init():
568
+ """Initialize all engines at startup. Ensures schema migration runs."""
569
+ try:
570
+ get_store() # Triggers MemoryStoreV2._init_db() which creates v2.8 columns
571
+ except Exception:
572
+ pass # Don't block server startup
573
+ try:
574
+ from lifecycle.lifecycle_engine import LifecycleEngine
575
+ LifecycleEngine() # Triggers _ensure_columns()
576
+ except Exception:
577
+ pass
578
+ try:
579
+ from behavioral.outcome_tracker import OutcomeTracker
580
+ OutcomeTracker(str(Path.home() / ".claude-memory" / "learning.db"))
581
+ except Exception:
582
+ pass
583
+ try:
584
+ from compliance.audit_db import AuditDB
585
+ AuditDB(str(Path.home() / ".claude-memory" / "audit.db"))
586
+ except Exception:
587
+ pass
588
+
589
+ # Run once at module load
590
+ _eager_init()
591
+
592
+
563
593
  # ============================================================================
564
594
  # MCP TOOLS (Functions callable by AI)
565
595
  # ============================================================================
@@ -579,13 +609,13 @@ async def remember(
579
609
  """
580
610
  Save content to SuperLocalMemory with intelligent indexing.
581
611
 
582
- This calls the SAME backend as /superlocalmemoryv2:remember skill.
612
+ This calls the SAME backend as /superlocalmemoryv2-remember skill.
583
613
  All memories are stored in the same local SQLite database.
584
614
 
585
615
  Args:
586
616
  content: The content to remember (required)
587
617
  tags: Comma-separated tags (optional, e.g. "python,api,backend")
588
- project: Project name (optional, groups related memories)
618
+ project: Project name to scope the memory
589
619
  importance: Importance score 1-10 (default 5)
590
620
 
591
621
  Returns:
@@ -1394,7 +1424,12 @@ try:
1394
1424
 
1395
1425
  @mcp.tool(annotations=ToolAnnotations(readOnlyHint=False, destructiveHint=False))
1396
1426
  async def compact_memories(dry_run: bool = True, profile: str = None) -> dict:
1397
- """Evaluate and compact stale memories through lifecycle transitions. dry_run=True by default."""
1427
+ """Evaluate and compact stale memories through lifecycle transitions. dry_run=True by default.
1428
+
1429
+ Args:
1430
+ dry_run: If True (default), show what would happen without changes.
1431
+ profile: Profile name to filter.
1432
+ """
1398
1433
  return await _compact_memories(dry_run, profile)
1399
1434
 
1400
1435
  @mcp.tool(annotations=ToolAnnotations(readOnlyHint=True, destructiveHint=False))
@@ -1649,7 +1684,7 @@ if __name__ == "__main__":
1649
1684
  print(" - list_recent(limit)", file=sys.stderr)
1650
1685
  print(" - get_status()", file=sys.stderr)
1651
1686
  print(" - build_graph()", file=sys.stderr)
1652
- print(" - switch_profile(name)", file=sys.stderr)
1687
+ print(" - switch_profile(name) [Project/Profile switch]", file=sys.stderr)
1653
1688
  print(" - backup_status() [Auto-Backup]", file=sys.stderr)
1654
1689
  if LEARNING_AVAILABLE:
1655
1690
  print(" - memory_used(memory_id, query, usefulness) [v2.7 Learning]", file=sys.stderr)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "superlocalmemory",
3
- "version": "2.8.0",
3
+ "version": "2.8.2",
4
4
  "description": "Your AI Finally Remembers You - Local-first intelligent memory system for AI assistants. Works with Claude, Cursor, Windsurf, VS Code/Copilot, Codex, and 17+ AI tools. 100% local, zero cloud dependencies.",
5
5
  "keywords": [
6
6
  "ai-memory",
@@ -22,7 +22,9 @@
22
22
  "chatgpt",
23
23
  "chatgpt-connector",
24
24
  "openai",
25
- "deep-research"
25
+ "deep-research",
26
+ "qualixar",
27
+ "agent-development-platform"
26
28
  ],
27
29
  "author": {
28
30
  "name": "Varun Pratap Bhardwaj",
@@ -336,7 +336,7 @@ date
336
336
  - `slm remember` - Save a new memory
337
337
  - `slm recall` - Search memories by relevance
338
338
  - `slm status` - Check memory count and stats
339
- - `slm switch-profile` - View different project's memories
339
+ - `slm switch-profile` - View different profile's memories
340
340
 
341
341
  ---
342
342
 
@@ -157,7 +157,7 @@ slm remember "Commit: $commit_msg (${commit_hash:0:7})" \
157
157
  - **Cross-tool sync:** All AI tools access same database (Cursor, ChatGPT, Claude, etc.)
158
158
  - **Unlimited:** No memory limits, no quotas
159
159
  - **Privacy:** Your data stays on your computer
160
- - **Profiles:** Use `slm switch-profile` for project isolation
160
+ - **Profiles:** Use `slm switch-profile` for profile isolation
161
161
 
162
162
  ## Related Commands
163
163
 
@@ -155,7 +155,7 @@ Corrupted Entries: 0 found
155
155
  - **Last Used:** Last access timestamp
156
156
 
157
157
  **Profiles allow:**
158
- - Project isolation
158
+ - Profile isolation
159
159
  - Context switching
160
160
  - Separate memory spaces
161
161
 
@@ -1,6 +1,6 @@
1
1
  ---
2
2
  name: slm-switch-profile
3
- description: Switch between memory profiles for project isolation and context management. Use when the user wants to change project context, separate work/personal memories, or manage multiple independent memory spaces. Each profile has its own database, graph, and patterns.
3
+ description: Switch between memory profiles for context isolation and management. Use when the user wants to change profile context, separate work/personal memories, or manage multiple independent memory spaces. Each profile has its own database, graph, and patterns.
4
4
  version: "2.1.0"
5
5
  license: MIT
6
6
  compatibility: "Requires SuperLocalMemory V2 installed at ~/.claude-memory/"
@@ -26,8 +26,8 @@ slm switch-profile <name> [--create]
26
26
 
27
27
  Each profile has its own:
28
28
  - ✅ **Separate database** - Zero context bleeding
29
- - ✅ **Independent knowledge graph** - Project-specific relationships
30
- - ✅ **Unique patterns** - Different coding preferences per project
29
+ - ✅ **Independent knowledge graph** - Profile-specific relationships
30
+ - ✅ **Unique patterns** - Different coding preferences per profile
31
31
  - ✅ **Isolated history** - No cross-contamination
32
32
 
33
33
  **Think of profiles as workspaces:**
@@ -80,7 +80,7 @@ class GraphEngine:
80
80
  logger.error(f"Missing required tables: {missing}")
81
81
  return {'success': False, 'error': 'database_not_initialized',
82
82
  'message': f"Database not initialized. Missing tables: {', '.join(missing)}",
83
- 'fix': "Run 'superlocalmemoryv2:status' first to initialize the database, or add some memories."}
83
+ 'fix': "Run 'superlocalmemoryv2-status' first to initialize the database, or add some memories."}
84
84
 
85
85
  active_profile = self._get_active_profile()
86
86
  logger.info(f"Building graph for profile: {active_profile}")
@@ -91,12 +91,12 @@ class GraphEngine:
91
91
  if len(memories) == 0:
92
92
  return {'success': False, 'error': 'no_memories',
93
93
  'message': 'No memories found in database.',
94
- 'fix': "Add some memories first: superlocalmemoryv2:remember 'Your content here'"}
94
+ 'fix': "Add some memories first: superlocalmemoryv2-remember 'Your content here'"}
95
95
  if len(memories) < 2:
96
96
  return {'success': False, 'error': 'insufficient_memories',
97
97
  'message': 'Need at least 2 memories to build knowledge graph.',
98
98
  'memories': len(memories),
99
- 'fix': "Add more memories: superlocalmemoryv2:remember 'Your content here'"}
99
+ 'fix': "Add more memories: superlocalmemoryv2-remember 'Your content here'"}
100
100
 
101
101
  memories = apply_sampling(cursor, memories, active_profile)
102
102
  clear_profile_graph_data(cursor, conn, memories, active_profile)
package/src/hnsw_index.py CHANGED
@@ -139,12 +139,18 @@ class HNSWIndex:
139
139
  with open(self.metadata_path, 'r') as f:
140
140
  metadata = json.load(f)
141
141
 
142
- # Validate metadata
142
+ # Validate metadata — auto-rebuild on dimension mismatch
143
143
  if metadata.get('dimension') != self.dimension:
144
- logger.warning(
145
- f"Index dimension mismatch: {metadata.get('dimension')} != {self.dimension}. "
146
- "Will rebuild index."
144
+ logger.info(
145
+ "Index dimension changed: %s -> %s. "
146
+ "Deleting old index files and rebuilding.",
147
+ metadata.get('dimension'), self.dimension,
147
148
  )
149
+ try:
150
+ self.index_path.unlink(missing_ok=True)
151
+ self.metadata_path.unlink(missing_ok=True)
152
+ except OSError as del_err:
153
+ logger.warning("Could not delete old index files: %s", del_err)
148
154
  return
149
155
 
150
156
  # Load HNSW index
@@ -38,6 +38,7 @@ class LifecycleEngine:
38
38
  self._db_path = str(db_path)
39
39
  self._config_path = config_path
40
40
  self._lock = threading.Lock()
41
+ self._ensure_columns()
41
42
 
42
43
  def _get_connection(self) -> sqlite3.Connection:
43
44
  """Get a SQLite connection to memory.db."""
@@ -45,6 +46,34 @@ class LifecycleEngine:
45
46
  conn.row_factory = sqlite3.Row
46
47
  return conn
47
48
 
49
+ def _ensure_columns(self) -> None:
50
+ """Ensure v2.8 lifecycle columns exist in memories table."""
51
+ try:
52
+ conn = self._get_connection()
53
+ try:
54
+ cursor = conn.cursor()
55
+ cursor.execute("PRAGMA table_info(memories)")
56
+ existing = {row[1] for row in cursor.fetchall()}
57
+ v28_cols = [
58
+ ("lifecycle_state", "TEXT DEFAULT 'active'"),
59
+ ("lifecycle_updated_at", "TIMESTAMP"),
60
+ ("lifecycle_history", "TEXT DEFAULT '[]'"),
61
+ ("access_level", "TEXT DEFAULT 'public'"),
62
+ ]
63
+ for col_name, col_type in v28_cols:
64
+ if col_name not in existing:
65
+ try:
66
+ cursor.execute(
67
+ f"ALTER TABLE memories ADD COLUMN {col_name} {col_type}"
68
+ )
69
+ except sqlite3.OperationalError:
70
+ pass
71
+ conn.commit()
72
+ finally:
73
+ conn.close()
74
+ except Exception:
75
+ pass # Graceful degradation — don't block engine init
76
+
48
77
  def is_valid_transition(self, from_state: str, to_state: str) -> bool:
49
78
  """Check if a state transition is valid per the state machine.
50
79
 
@@ -70,10 +99,22 @@ class LifecycleEngine:
70
99
  """
71
100
  conn = self._get_connection()
72
101
  try:
73
- row = conn.execute(
74
- "SELECT lifecycle_state FROM memories WHERE id = ?",
75
- (memory_id,),
76
- ).fetchone()
102
+ try:
103
+ row = conn.execute(
104
+ "SELECT lifecycle_state FROM memories WHERE id = ?",
105
+ (memory_id,),
106
+ ).fetchone()
107
+ except sqlite3.OperationalError as e:
108
+ if "no such column" in str(e):
109
+ conn.close()
110
+ self._ensure_columns()
111
+ conn = self._get_connection()
112
+ row = conn.execute(
113
+ "SELECT lifecycle_state FROM memories WHERE id = ?",
114
+ (memory_id,),
115
+ ).fetchone()
116
+ else:
117
+ raise
77
118
  if row is None:
78
119
  return None
79
120
  return row["lifecycle_state"] or "active"
@@ -278,10 +319,22 @@ class LifecycleEngine:
278
319
  conn = self._get_connection()
279
320
  try:
280
321
  dist = {state: 0 for state in self.STATES}
281
- rows = conn.execute(
282
- "SELECT lifecycle_state, COUNT(*) as cnt "
283
- "FROM memories GROUP BY lifecycle_state"
284
- ).fetchall()
322
+ try:
323
+ rows = conn.execute(
324
+ "SELECT lifecycle_state, COUNT(*) as cnt "
325
+ "FROM memories GROUP BY lifecycle_state"
326
+ ).fetchall()
327
+ except sqlite3.OperationalError as e:
328
+ if "no such column" in str(e):
329
+ conn.close()
330
+ self._ensure_columns()
331
+ conn = self._get_connection()
332
+ rows = conn.execute(
333
+ "SELECT lifecycle_state, COUNT(*) as cnt "
334
+ "FROM memories GROUP BY lifecycle_state"
335
+ ).fetchall()
336
+ else:
337
+ raise
285
338
  for row in rows:
286
339
  state = row["lifecycle_state"] if row["lifecycle_state"] else "active"
287
340
  if state in dist:
@@ -59,6 +59,15 @@ class LifecycleEvaluator:
59
59
  conn.row_factory = sqlite3.Row
60
60
  return conn
61
61
 
62
+ def _ensure_lifecycle_columns(self) -> None:
63
+ """Ensure v2.8 lifecycle columns exist via LifecycleEngine."""
64
+ try:
65
+ from lifecycle.lifecycle_engine import LifecycleEngine
66
+ engine = LifecycleEngine(db_path=self._db_path)
67
+ engine._ensure_columns()
68
+ except Exception:
69
+ pass # Best effort — don't block evaluation
70
+
62
71
  def evaluate_memories(
63
72
  self,
64
73
  profile: Optional[str] = None,
@@ -87,7 +96,17 @@ class LifecycleEvaluator:
87
96
  query += " AND profile = ?"
88
97
  params.append(profile)
89
98
 
90
- rows = conn.execute(query, params).fetchall()
99
+ try:
100
+ rows = conn.execute(query, params).fetchall()
101
+ except sqlite3.OperationalError as e:
102
+ if "no such column" in str(e):
103
+ conn.close()
104
+ self._ensure_lifecycle_columns()
105
+ conn = self._get_connection()
106
+ rows = conn.execute(query, params).fetchall()
107
+ else:
108
+ raise
109
+
91
110
  recommendations = []
92
111
  now = datetime.now()
93
112
 
@@ -123,11 +142,24 @@ class LifecycleEvaluator:
123
142
  config = self._load_config()
124
143
  conn = self._get_connection()
125
144
  try:
126
- row = conn.execute(
127
- "SELECT id, lifecycle_state, importance, last_accessed, created_at "
128
- "FROM memories WHERE id = ?",
129
- (memory_id,),
130
- ).fetchone()
145
+ try:
146
+ row = conn.execute(
147
+ "SELECT id, lifecycle_state, importance, last_accessed, created_at "
148
+ "FROM memories WHERE id = ?",
149
+ (memory_id,),
150
+ ).fetchone()
151
+ except sqlite3.OperationalError as e:
152
+ if "no such column" in str(e):
153
+ conn.close()
154
+ self._ensure_lifecycle_columns()
155
+ conn = self._get_connection()
156
+ row = conn.execute(
157
+ "SELECT id, lifecycle_state, importance, last_accessed, created_at "
158
+ "FROM memories WHERE id = ?",
159
+ (memory_id,),
160
+ ).fetchone()
161
+ else:
162
+ raise
131
163
  if row is None:
132
164
  return None
133
165
  return self._evaluate_row(row, config, datetime.now())
@@ -46,7 +46,7 @@ async def report_outcome(
46
46
  action_type: Category (code_written, decision_made, debug_resolved, etc.).
47
47
  context: Optional JSON string with additional context metadata.
48
48
  agent_id: Identifier for the reporting agent.
49
- project: Project name for scoping.
49
+ project: Project name for scoping outcomes.
50
50
 
51
51
  Returns:
52
52
  Dict with success status and outcome_id on success.
@@ -170,17 +170,18 @@ async def compact_memories(
170
170
 
171
171
  Args:
172
172
  dry_run: If True (default), show what would happen without changes.
173
- profile: Optional profile filter.
173
+ profile: Optional profile filter to scope compaction.
174
174
 
175
175
  Returns:
176
176
  Dict with recommendations (dry_run=True) or transition counts (dry_run=False).
177
177
  """
178
+ active_profile = profile
178
179
  try:
179
180
  from lifecycle.lifecycle_evaluator import LifecycleEvaluator
180
181
  from lifecycle.lifecycle_engine import LifecycleEngine
181
182
 
182
183
  evaluator = LifecycleEvaluator(DEFAULT_MEMORY_DB)
183
- recommendations = evaluator.evaluate_memories(profile=profile)
184
+ recommendations = evaluator.evaluate_memories(profile=active_profile)
184
185
 
185
186
  if dry_run:
186
187
  return {
@@ -496,6 +496,7 @@ class ProfileManager:
496
496
  return True
497
497
 
498
498
 
499
+
499
500
  def main():
500
501
  parser = argparse.ArgumentParser(
501
502
  description='SuperLocalMemory V2 - Profile Management (Column-Based)',
@@ -1201,7 +1201,8 @@ class MemoryStoreV2:
1201
1201
  Removing or obscuring this attribution violates the license terms.
1202
1202
 
1203
1203
  Returns:
1204
- Dictionary with creator information and attribution requirements
1204
+ Dictionary with creator information and attribution requirements,
1205
+ including Qualixar platform provenance.
1205
1206
  """
1206
1207
  with self._read_connection() as conn:
1207
1208
  cursor = conn.cursor()
@@ -1218,6 +1219,10 @@ class MemoryStoreV2:
1218
1219
  'attribution_required': 'yes'
1219
1220
  }
1220
1221
 
1222
+ # Qualixar platform provenance (non-breaking additions)
1223
+ attribution['platform'] = 'Qualixar'
1224
+ attribution['verify_url'] = 'https://qualixar.com'
1225
+
1221
1226
  return attribution
1222
1227
 
1223
1228
  def export_for_context(self, query: str, max_tokens: int = 4000) -> str:
@@ -0,0 +1,139 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ # Part of Qualixar — Advancing Agent Development Through Research
5
+ """
6
+ Qualixar Attribution — Cryptographic output signing for provenance tracking.
7
+
8
+ Signs tool outputs with SHA-256 content hashes and verifiable provenance
9
+ metadata. Part of the 3-layer Qualixar attribution system:
10
+ Layer 1: Visible attribution (ATTRIBUTION.md, get_attribution())
11
+ Layer 2: Cryptographic signing (this module)
12
+ Layer 3: Steganographic watermarking (qualixar_watermark.py)
13
+ """
14
+
15
+ import hashlib
16
+ import json
17
+ import time
18
+ from typing import Any, Dict
19
+
20
+
21
+ # Registry of all Qualixar research initiative tools
22
+ TOOL_REGISTRY: Dict[str, str] = {
23
+ "agentassert": "AgentAssert — Behavioral Contracts",
24
+ "agentassay": "AgentAssay — Stochastic Testing",
25
+ "skillfortify": "SkillFortify — Security Validation",
26
+ "superlocalmemory": "SuperLocalMemory — Agent Memory",
27
+ "telephonebench": "TelephoneBench — Communication Benchmarks",
28
+ "vibecheck": "VibeCheck — Code Reliability",
29
+ "agentreplay": "AgentReplay — Time-Travel Debugging",
30
+ "agentchaos": "AgentChaos — Chaos Engineering",
31
+ "agentmigrate": "AgentMigrate — Migration Engineering",
32
+ "agentpact": "AgentPact — Composition Testing",
33
+ }
34
+
35
+
36
+ class QualixarSigner:
37
+ """Signs tool outputs with cryptographic provenance metadata.
38
+
39
+ Adds a ``_qualixar`` block to output dictionaries containing a SHA-256
40
+ content hash, timestamp, tool information, and a verifiable signature.
41
+
42
+ Args:
43
+ tool_name: Identifier for the tool (e.g. ``"superlocalmemory"``).
44
+ version: Semantic version string of the tool.
45
+
46
+ Example::
47
+
48
+ signer = QualixarSigner("superlocalmemory", "2.8.1")
49
+ signed = signer.sign({"memories": [...]})
50
+ assert QualixarSigner.verify(signed) is True
51
+ """
52
+
53
+ def __init__(self, tool_name: str, version: str) -> None:
54
+ self.tool_name = tool_name
55
+ self.version = version
56
+ self.tool_desc = TOOL_REGISTRY.get(tool_name, tool_name)
57
+
58
+ def sign(self, output_data: Any) -> Dict[str, Any]:
59
+ """Add cryptographic provenance to any output.
60
+
61
+ Args:
62
+ output_data: The data to sign. If a dict, provenance is added
63
+ in-place. Otherwise wrapped in ``{"data": ...}``.
64
+
65
+ Returns:
66
+ Dictionary with the original data plus a ``_qualixar`` provenance
67
+ block containing content_hash, timestamp, and signature.
68
+ """
69
+ timestamp = time.time()
70
+ canonical = json.dumps(output_data, sort_keys=True, default=str)
71
+ content_hash = hashlib.sha256(canonical.encode()).hexdigest()
72
+
73
+ provenance = {
74
+ "_qualixar": {
75
+ "tool": self.tool_name,
76
+ "tool_description": self.tool_desc,
77
+ "version": self.version,
78
+ "platform": "Qualixar",
79
+ "timestamp": timestamp,
80
+ "content_hash": content_hash,
81
+ "license": "MIT",
82
+ "attribution": (
83
+ f"Generated by {self.tool_desc} v{self.version}"
84
+ ),
85
+ "signature": self._compute_signature(
86
+ content_hash, timestamp
87
+ ),
88
+ }
89
+ }
90
+
91
+ if isinstance(output_data, dict):
92
+ output_data.update(provenance)
93
+ return output_data
94
+ return {"data": output_data, **provenance}
95
+
96
+ def _compute_signature(
97
+ self, content_hash: str, timestamp: float
98
+ ) -> str:
99
+ """Compute HMAC-style signature over content hash and timestamp.
100
+
101
+ In production, this should use Ed25519 with a private key.
102
+ The current implementation uses SHA-256 over a canonical string
103
+ as a lightweight integrity check.
104
+
105
+ Args:
106
+ content_hash: SHA-256 hex digest of the canonical content.
107
+ timestamp: Unix timestamp of signing.
108
+
109
+ Returns:
110
+ Hex-encoded SHA-256 signature string.
111
+ """
112
+ sig_input = (
113
+ f"{self.tool_name}:{self.version}:"
114
+ f"{content_hash}:{timestamp}"
115
+ )
116
+ return hashlib.sha256(sig_input.encode()).hexdigest()
117
+
118
+ @staticmethod
119
+ def verify(output_data: Dict[str, Any]) -> bool:
120
+ """Verify an output's provenance signature.
121
+
122
+ Re-computes the content hash from the non-provenance fields and
123
+ compares it against the hash stored in the ``_qualixar`` block.
124
+
125
+ Args:
126
+ output_data: A dictionary previously signed with :meth:`sign`.
127
+
128
+ Returns:
129
+ ``True`` if the content hash matches, ``False`` otherwise.
130
+ """
131
+ prov = output_data.get("_qualixar", {})
132
+ if not prov:
133
+ return False
134
+ content = {
135
+ k: v for k, v in output_data.items() if k != "_qualixar"
136
+ }
137
+ canonical = json.dumps(content, sort_keys=True, default=str)
138
+ expected_hash = hashlib.sha256(canonical.encode()).hexdigest()
139
+ return prov.get("content_hash") == expected_hash
@@ -0,0 +1,78 @@
1
+ #!/usr/bin/env python3
2
+ # SPDX-License-Identifier: MIT
3
+ # Copyright (c) 2026 SuperLocalMemory (superlocalmemory.com)
4
+ # Part of Qualixar — Advancing Agent Development Through Research
5
+ """
6
+ Qualixar Watermark — Steganographic attribution for text outputs.
7
+
8
+ Embeds invisible zero-width Unicode characters in text to encode a tool
9
+ identifier. The watermark is invisible to human readers but can be
10
+ extracted programmatically to verify provenance.
11
+
12
+ Part of the 3-layer Qualixar attribution system:
13
+ Layer 1: Visible attribution (ATTRIBUTION.md, get_attribution())
14
+ Layer 2: Cryptographic signing (qualixar_attribution.py)
15
+ Layer 3: Steganographic watermarking (this module)
16
+
17
+ No external dependencies required.
18
+ """
19
+
20
+ # Zero-width characters for binary encoding
21
+ ZW_SPACE = '\u200b' # Zero-width space = bit 0
22
+ ZW_JOINER = '\u200d' # Zero-width joiner = bit 1
23
+ ZW_SEP = '\ufeff' # Byte order mark = separator
24
+
25
+
26
+ def encode_watermark(text: str, tool_id: str) -> str:
27
+ """Embed an invisible watermark in text output.
28
+
29
+ Converts ``tool_id`` to binary and encodes each bit as a zero-width
30
+ Unicode character. The watermark is inserted after the first paragraph
31
+ break (``\\n\\n``) so it remains invisible to human readers.
32
+
33
+ Args:
34
+ text: The text to watermark.
35
+ tool_id: Short identifier to embed (e.g. ``"slm"``).
36
+
37
+ Returns:
38
+ The original text with the invisible watermark inserted.
39
+ """
40
+ binary = ''.join(format(ord(c), '08b') for c in tool_id)
41
+ watermark = ZW_SEP
42
+ for bit in binary:
43
+ watermark += ZW_SPACE if bit == '0' else ZW_JOINER
44
+ watermark += ZW_SEP
45
+
46
+ # Insert after first paragraph break (invisible to users)
47
+ if '\n\n' in text:
48
+ idx = text.index('\n\n') + 2
49
+ return text[:idx] + watermark + text[idx:]
50
+ return text + watermark
51
+
52
+
53
+ def decode_watermark(text: str) -> str:
54
+ """Extract a hidden watermark from text.
55
+
56
+ Locates the zero-width separator characters and decodes the binary
57
+ payload between them back into the original tool identifier string.
58
+
59
+ Args:
60
+ text: Text that may contain a watermark.
61
+
62
+ Returns:
63
+ The decoded tool identifier, or an empty string if no watermark
64
+ is found.
65
+ """
66
+ start = text.find(ZW_SEP)
67
+ if start == -1:
68
+ return ""
69
+ end = text.find(ZW_SEP, start + 1)
70
+ if end == -1:
71
+ return ""
72
+ encoded = text[start + 1:end]
73
+ binary = ''.join(
74
+ '0' if c == ZW_SPACE else '1'
75
+ for c in encoded
76
+ )
77
+ chars = [binary[i:i + 8] for i in range(0, len(binary), 8)]
78
+ return ''.join(chr(int(b, 2)) for b in chars if len(b) == 8)
@@ -425,9 +425,9 @@ def validate_setup(auto_fix: bool = False) -> bool:
425
425
  print("\n✓ All required checks passed!")
426
426
  print("\nQuick Start Commands:")
427
427
  print(" 1. Add a memory:")
428
- print(" superlocalmemoryv2:remember 'Your content here'")
428
+ print(" superlocalmemoryv2-remember 'Your content here'")
429
429
  print("\n 2. Search memories:")
430
- print(" superlocalmemoryv2:recall 'search query'")
430
+ print(" superlocalmemoryv2-recall 'search query'")
431
431
  print("\n 3. Build knowledge graph (after adding 2+ memories):")
432
432
  print(" python ~/.claude-memory/graph_engine.py build")
433
433
  print("\n 4. Start UI server:")