@dinasor/mnemo-cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/CHANGELOG.md +46 -0
  2. package/LICENSE +21 -0
  3. package/README.md +263 -0
  4. package/VERSION +1 -0
  5. package/bin/mnemo.js +139 -0
  6. package/memory.ps1 +178 -0
  7. package/memory_mac.sh +2447 -0
  8. package/package.json +36 -0
  9. package/scripts/memory/installer/bootstrap.ps1 +21 -0
  10. package/scripts/memory/installer/core/bridge.ps1 +285 -0
  11. package/scripts/memory/installer/core/io.ps1 +110 -0
  12. package/scripts/memory/installer/core/paths.ps1 +83 -0
  13. package/scripts/memory/installer/features/gitignore_setup.ps1 +80 -0
  14. package/scripts/memory/installer/features/hooks_setup.ps1 +157 -0
  15. package/scripts/memory/installer/features/mcp_setup.ps1 +87 -0
  16. package/scripts/memory/installer/features/memory_scaffold.ps1 +541 -0
  17. package/scripts/memory/installer/features/vector_setup.ps1 +103 -0
  18. package/scripts/memory/installer/templates/add-journal-entry.ps1 +122 -0
  19. package/scripts/memory/installer/templates/add-lesson.ps1 +151 -0
  20. package/scripts/memory/installer/templates/autonomy/__init__.py +6 -0
  21. package/scripts/memory/installer/templates/autonomy/context_safety.py +181 -0
  22. package/scripts/memory/installer/templates/autonomy/entity_resolver.py +215 -0
  23. package/scripts/memory/installer/templates/autonomy/ingest_pipeline.py +252 -0
  24. package/scripts/memory/installer/templates/autonomy/lifecycle_engine.py +254 -0
  25. package/scripts/memory/installer/templates/autonomy/policies.yaml +59 -0
  26. package/scripts/memory/installer/templates/autonomy/reranker.py +220 -0
  27. package/scripts/memory/installer/templates/autonomy/retrieval_router.py +148 -0
  28. package/scripts/memory/installer/templates/autonomy/runner.py +272 -0
  29. package/scripts/memory/installer/templates/autonomy/schema.py +150 -0
  30. package/scripts/memory/installer/templates/autonomy/vault_policy.py +205 -0
  31. package/scripts/memory/installer/templates/build-memory-sqlite.py +111 -0
  32. package/scripts/memory/installer/templates/clear-active.ps1 +55 -0
  33. package/scripts/memory/installer/templates/customization.md +84 -0
  34. package/scripts/memory/installer/templates/lint-memory.ps1 +217 -0
  35. package/scripts/memory/installer/templates/mnemo_vector.py +556 -0
  36. package/scripts/memory/installer/templates/query-memory-sqlite.py +95 -0
  37. package/scripts/memory/installer/templates/query-memory.ps1 +122 -0
  38. package/scripts/memory/installer/templates/rebuild-memory-index.ps1 +293 -0
package/scripts/memory/installer/templates/autonomy/schema.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+"""
+schema.py - Mnemo typed memory schema v2.
+Initializes and migrates the vector DB with full typed memory unit tables.
+Used by all autonomy modules to get a DB connection with guaranteed schema.
+"""
+import sqlite3
+import os
+from pathlib import Path
+
+import sqlite_vec
+
+SCHEMA_VERSION = 2
+EMBED_DIM = int(os.getenv("MNEMO_EMBED_DIM", "1536"))
+
+
+def _memory_root() -> Path:
+    override = os.getenv("MNEMO_MEMORY_ROOT", "").strip()
+    if override:
+        return Path(override).expanduser().resolve()
+
+    cwd = Path.cwd().resolve()
+    for root in (cwd, *cwd.parents):
+        for rel in ((".mnemo", "memory"), (".cursor", "memory")):
+            candidate = root.joinpath(*rel)
+            if candidate.exists():
+                return candidate
+    return cwd / ".mnemo" / "memory"
+
+
+def _db_path() -> Path:
+    db_override = os.getenv("MNEMO_DB_PATH", "").strip()
+    if db_override:
+        return Path(db_override).expanduser().resolve()
+    return _memory_root() / "mnemo_vector.sqlite"
+
+
+def get_db(db_path: Path | None = None, timeout: float = 30.0) -> sqlite3.Connection:
+    """Return a connected, migrated DB."""
+    if db_path is None:
+        db_path = _db_path()
+    db_path.parent.mkdir(parents=True, exist_ok=True)
+    db = sqlite3.connect(str(db_path), timeout=timeout)
+    db.row_factory = sqlite3.Row
+    db.execute("PRAGMA journal_mode=WAL")
+    db.execute("PRAGMA foreign_keys=ON")
+    db.execute("PRAGMA busy_timeout=10000")
+    db.enable_load_extension(True)
+    sqlite_vec.load(db)
+    _migrate(db)
+    return db
+
+
+def _migrate(db: sqlite3.Connection) -> None:
+    db.execute("CREATE TABLE IF NOT EXISTS schema_info (key TEXT PRIMARY KEY, value TEXT)")
+    row = db.execute("SELECT value FROM schema_info WHERE key='version'").fetchone()
+    ver = int(row["value"]) if row else 0
+
+    if ver < 1:
+        db.execute("DROP TABLE IF EXISTS file_meta")
+        db.execute("DROP TABLE IF EXISTS vec_memory")
+        db.execute("""
+            CREATE TABLE file_meta (
+                path TEXT PRIMARY KEY,
+                hash TEXT NOT NULL,
+                chunk_count INTEGER DEFAULT 0,
+                updated_at REAL DEFAULT (unixepoch('now'))
+            )
+        """)
+        db.execute(f"""
+            CREATE VIRTUAL TABLE vec_memory USING vec0(
+                embedding float[{EMBED_DIM}] distance_metric=cosine,
+                +ref_path TEXT,
+                +content TEXT,
+                +source_file TEXT
+            )
+        """)
+
+    if ver < 2:
+        db.execute("""
+            CREATE TABLE IF NOT EXISTS memory_units (
+                unit_id TEXT PRIMARY KEY,
+                source_ref TEXT NOT NULL UNIQUE,
+                memory_type TEXT NOT NULL DEFAULT 'semantic',
+                authority REAL NOT NULL DEFAULT 0.5,
+                time_scope TEXT NOT NULL DEFAULT 'time-bound',
+                sensitivity TEXT NOT NULL DEFAULT 'public',
+                entity_tags TEXT NOT NULL DEFAULT '[]',
+                content_hash TEXT NOT NULL,
+                created_at REAL DEFAULT (unixepoch('now')),
+                updated_at REAL DEFAULT (unixepoch('now'))
+            )
+        """)
+        db.execute("""
+            CREATE TABLE IF NOT EXISTS facts (
+                fact_id TEXT PRIMARY KEY,
+                canonical_fact TEXT NOT NULL,
+                status TEXT NOT NULL DEFAULT 'active',
+                confidence REAL NOT NULL DEFAULT 1.0,
+                source_ref TEXT NOT NULL,
+                created_at REAL DEFAULT (unixepoch('now')),
+                updated_at REAL DEFAULT (unixepoch('now'))
+            )
+        """)
+        db.execute("""
+            CREATE TABLE IF NOT EXISTS lifecycle_events (
+                event_id TEXT PRIMARY KEY,
+                unit_id TEXT NOT NULL,
+                operation TEXT NOT NULL CHECK (operation IN ('ADD','UPDATE','DEPRECATE','NOOP')),
+                old_status TEXT,
+                new_status TEXT,
+                reason TEXT,
+                ts REAL DEFAULT (unixepoch('now'))
+            )
+        """)
+        db.execute("""
+            CREATE TABLE IF NOT EXISTS entities (
+                entity_id TEXT PRIMARY KEY,
+                entity_name TEXT NOT NULL,
+                entity_type TEXT NOT NULL DEFAULT 'general',
+                confidence REAL NOT NULL DEFAULT 1.0,
+                created_at REAL DEFAULT (unixepoch('now'))
+            )
+        """)
+        db.execute("""
+            CREATE TABLE IF NOT EXISTS entity_aliases (
+                alias_id TEXT PRIMARY KEY,
+                entity_id TEXT NOT NULL REFERENCES entities(entity_id),
+                alias_text TEXT NOT NULL,
+                confidence REAL NOT NULL DEFAULT 1.0,
+                UNIQUE(alias_text)
+            )
+        """)
+        db.execute("""
+            CREATE TABLE IF NOT EXISTS autonomy_state (
+                key TEXT PRIMARY KEY,
+                value TEXT,
+                updated_at REAL DEFAULT (unixepoch('now'))
+            )
+        """)
+    db.execute(
+        "INSERT OR REPLACE INTO schema_info(key, value) VALUES ('version', ?)",
+        (str(SCHEMA_VERSION),),
+    )
+    db.commit()
+
+
+def get_schema_version(db: sqlite3.Connection) -> int:
+    row = db.execute("SELECT value FROM schema_info WHERE key='version'").fetchone()
+    return int(row["value"]) if row else 0
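For orientation, here is a minimal sketch of how a downstream autonomy module is expected to obtain a migrated connection from this file. It assumes the templates are importable as the `autonomy` package, that `sqlite-vec` is installed, and Python 3.10+; the scratch path is a made-up example, not anything the package prescribes.

```python
import os

# Hypothetical override: point the schema at a scratch database file.
os.environ["MNEMO_DB_PATH"] = "/tmp/mnemo_vector.sqlite"

from autonomy.schema import SCHEMA_VERSION, get_db, get_schema_version

db = get_db()  # connects, loads the sqlite-vec extension, and runs migrations
assert get_schema_version(db) == SCHEMA_VERSION

# List the tables created by the migrations (plus sqlite-vec's shadow tables).
names = [r["name"] for r in db.execute(
    "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name"
)]
print(names)
db.close()
```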
package/scripts/memory/installer/templates/autonomy/vault_policy.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python3
+"""
+vault_policy.py - Vault lane and sensitivity enforcement for Mnemo.
+
+Handles:
+- Marking files/units as secret/vault (sensitivity classification)
+- Automatic redaction before context pack delivery
+- Policy config loading from policies.yaml
+- Autonomous redaction pipeline (no human required)
+"""
+import re
+import sqlite3
+import os
+import yaml
+from pathlib import Path
+from typing import Optional
+
+from autonomy.schema import get_db
+
+
+def _resolve_memory_root() -> Path:
+    override = os.getenv("MNEMO_MEMORY_ROOT", "").strip()
+    if override:
+        return Path(override).expanduser().resolve()
+
+    cwd = Path.cwd().resolve()
+    for root in (cwd, *cwd.parents):
+        for rel in ((".mnemo", "memory"), (".cursor", "memory")):
+            candidate = root.joinpath(*rel)
+            if candidate.exists():
+                return candidate
+    return cwd / ".mnemo" / "memory"
+
+
+DEFAULT_POLICY_PATH = _resolve_memory_root() / ".autonomy" / "policies.yaml"
+_POLICY_CACHE: dict | None = None
+
+# Built-in secret patterns (supplemented by policies.yaml)
+_BUILTIN_SECRET_PATTERNS = [
+    r"(?i)(api[_-]?key|secret[_-]?key|password|token|auth[_-]?token)\s*[:=]\s*\S+",
+    r"(?i)bearer\s+[a-zA-Z0-9._-]{20,}",
+    r"[a-zA-Z0-9]{32,}",  # Long API keys (conservative - only in vault paths)
+]
+
+REDACTION_PLACEHOLDER = "[REDACTED]"
+
+
+def load_policy(policy_path: Path = DEFAULT_POLICY_PATH) -> dict:
+    """Load vault policy from YAML file, with fallback to defaults."""
+    global _POLICY_CACHE
+    if _POLICY_CACHE is not None:
+        return _POLICY_CACHE
+
+    defaults = {
+        "sensitivity_paths": {
+            "secret": [".mnemo/memory/vault/", ".cursor/memory/vault/", ".env", "*.secret.*"],
+            "internal": [".mnemo/memory/active-context.md", ".cursor/memory/active-context.md"],
+        },
+        "redaction_patterns": [],
+        "allow_internal_for_roles": ["agent", "autonomous"],
+        "max_sensitivity_in_context": "internal",
+    }
+
+    if not policy_path.exists():
+        _POLICY_CACHE = defaults
+        return defaults
+
+    try:
+        with open(policy_path, "r", encoding="utf-8") as f:
+            loaded = yaml.safe_load(f) or {}
+        merged = {**defaults, **loaded}
+        _POLICY_CACHE = merged
+        return merged
+    except Exception:
+        _POLICY_CACHE = defaults
+        return defaults
+
+
+def invalidate_policy_cache() -> None:
+    global _POLICY_CACHE
+    _POLICY_CACHE = None
+
+
+def classify_sensitivity(path_str: str, policy: dict | None = None) -> str:
+    """Return 'public', 'internal', or 'secret' for a given file path."""
+    if policy is None:
+        policy = load_policy()
+
+    p_lower = path_str.lower().replace("\\", "/")
+    patterns = policy.get("sensitivity_paths", {})
+
+    for label in ("secret", "internal", "public"):
+        for pattern in patterns.get(label, []):
+            pat_lower = pattern.lower().replace("\\", "/")
+            if pat_lower.endswith("/") and pat_lower in p_lower:
+                return label
+            if pat_lower.startswith("*.") and p_lower.endswith(pat_lower[1:]):
+                return label
+            if pat_lower in p_lower:
+                return label
+
+    # Built-in vault detection
+    if "/vault/" in p_lower:
+        return "secret"
+    if ".secret." in p_lower or "secret." in p_lower:
+        return "secret"
+
+    return "public"
+
+
+def redact_content(content: str, sensitivity: str, policy: dict | None = None) -> str:
+    """
+    Redact sensitive patterns from content.
+    For 'secret' sensitivity: apply all redaction patterns.
+    For 'internal': apply only external-facing redaction.
+    """
+    if sensitivity == "public":
+        return content
+
+    if policy is None:
+        policy = load_policy()
+
+    result = content
+    patterns_to_apply = list(_BUILTIN_SECRET_PATTERNS)
+    extra = policy.get("redaction_patterns", [])
+    if isinstance(extra, list):
+        patterns_to_apply.extend(extra)
+
+    for pat in patterns_to_apply:
+        try:
+            result = re.sub(pat, REDACTION_PLACEHOLDER, result)
+        except re.error:
+            pass
+
+    return result
+
+
+class VaultPolicy:
+    def __init__(
+        self,
+        db: Optional[sqlite3.Connection] = None,
+        policy_path: Path = DEFAULT_POLICY_PATH,
+    ):
+        self.db = db or get_db()
+        self.policy = load_policy(policy_path)
+
+    def classify_and_persist(self, source_ref: str) -> str:
+        """Classify sensitivity and update DB record."""
+        sensitivity = classify_sensitivity(source_ref, self.policy)
+        self.db.execute(
+            "UPDATE memory_units SET sensitivity=?, updated_at=unixepoch('now') WHERE source_ref=?",
+            (sensitivity, source_ref),
+        )
+        self.db.commit()
+        return sensitivity
+
+    def bulk_reclassify(self) -> dict[str, int]:
+        """Reclassify all memory units. Returns {sensitivity: count}."""
+        rows = self.db.execute("SELECT unit_id, source_ref FROM memory_units").fetchall()
+        counts: dict[str, int] = {"public": 0, "internal": 0, "secret": 0}
+
+        for row in rows:
+            sensitivity = classify_sensitivity(row["source_ref"], self.policy)
+            self.db.execute(
+                "UPDATE memory_units SET sensitivity=?, updated_at=unixepoch('now') WHERE unit_id=?",
+                (sensitivity, row["unit_id"]),
+            )
+            counts[sensitivity] = counts.get(sensitivity, 0) + 1
+
+        self.db.commit()
+        return counts
+
+    def is_authorized(self, sensitivity: str, role: str = "agent") -> bool:
+        """Return True if the given role is authorized to see this sensitivity level."""
+        max_level = self.policy.get("max_sensitivity_in_context", "internal")
+        allowed_roles = self.policy.get("allow_internal_for_roles", [])
+
+        if sensitivity == "public":
+            return True
+        if sensitivity == "secret":
+            return False
+        if sensitivity == "internal":
+            return role in allowed_roles
+
+        return False
+
+    def apply_redaction(self, content: str, sensitivity: str) -> str:
+        """Apply content redaction based on sensitivity level."""
+        return redact_content(content, sensitivity, self.policy)
+
+    def audit_report(self) -> dict:
+        """Return audit summary of sensitivity distribution."""
+        rows = self.db.execute(
+            "SELECT sensitivity, COUNT(*) as cnt FROM memory_units GROUP BY sensitivity"
+        ).fetchall()
+        report = {r["sensitivity"]: r["cnt"] for r in rows}
+
+        # Check for units with secret content in non-vault paths
+        leaked = self.db.execute(
+            "SELECT COUNT(*) FROM memory_units WHERE sensitivity='secret' AND source_ref NOT LIKE '%vault%'"
+        ).fetchone()[0]
+        if leaked:
+            report["_warning_potential_leaks"] = leaked
+
+        return report
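A quick sketch (not part of the package) of the module's pure helpers, driven by an explicit policy dict so no database or policies.yaml is needed; the paths and key string below are made-up examples.

```python
from autonomy.vault_policy import classify_sensitivity, redact_content

# Explicit policy dict so load_policy()/policies.yaml are never consulted.
policy = {
    "sensitivity_paths": {
        "secret": [".mnemo/memory/vault/"],
        "internal": [".mnemo/memory/active-context.md"],
    },
    "redaction_patterns": [],
}

print(classify_sensitivity(".mnemo/memory/vault/credentials.md", policy))  # secret
print(classify_sensitivity(".mnemo/memory/active-context.md", policy))     # internal
print(classify_sensitivity(".mnemo/memory/journal/2024-05.md", policy))    # public

note = "api_key = sk_live_1234567890abcdef\nplain text survives"
print(redact_content(note, "secret", policy))
# The whole key assignment is replaced: "[REDACTED]\nplain text survives"
```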
package/scripts/memory/installer/templates/build-memory-sqlite.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+"""Build SQLite FTS5 index from memory JSON indexes."""
+import argparse
+import json
+import sqlite3
+from pathlib import Path
+
+
+def read_text(p: Path) -> str:
+    return p.read_text(encoding="utf-8-sig", errors="replace")
+
+
+def resolve_memory_dir(repo: Path) -> Path:
+    candidates = [
+        repo / ".mnemo" / "memory",
+        repo / ".cursor" / "memory",
+    ]
+    for candidate in candidates:
+        if candidate.exists():
+            return candidate
+    return candidates[0]
+
+
+def main():
+    ap = argparse.ArgumentParser()
+    ap.add_argument("--repo", required=True)
+    args = ap.parse_args()
+
+    repo = Path(args.repo)
+    mem = resolve_memory_dir(repo)
+    out_db = mem / "memory.sqlite"
+
+    lessons_index = mem / "lessons-index.json"
+    journal_index = mem / "journal-index.json"
+
+    lessons = []
+    if lessons_index.exists():
+        t = read_text(lessons_index).strip()
+        if t:
+            lessons = json.loads(t)
+        if not isinstance(lessons, list):
+            lessons = [lessons] if lessons else []
+
+    journal = []
+    if journal_index.exists():
+        t = read_text(journal_index).strip()
+        if t:
+            journal = json.loads(t)
+        if not isinstance(journal, list):
+            journal = [journal] if journal else []
+
+    if out_db.exists():
+        out_db.unlink()
+
+    con = sqlite3.connect(str(out_db))
+    cur = con.cursor()
+    cur.execute("CREATE VIRTUAL TABLE memory_fts USING fts5(kind, id, date, tags, title, content, path);")
+
+    for kind, fid, path in [
+        ("hot_rules", "HOT", mem / "hot-rules.md"),
+        ("active", "ACTIVE", mem / "active-context.md"),
+        ("memo", "MEMO", mem / "memo.md"),
+    ]:
+        if path.exists():
+            cur.execute(
+                "INSERT INTO memory_fts(kind,id,date,tags,title,content,path) VALUES (?,?,?,?,?,?,?)",
+                (kind, fid, None, "", path.name, read_text(path), str(path)),
+            )
+
+    lessons_dir = mem / "lessons"
+    for l in lessons:
+        lid = l.get("Id")
+        title = l.get("Title", "")
+        tags = " ".join(l.get("Tags") or [])
+        date = l.get("Introduced")
+        file = l.get("File", "")
+        path = lessons_dir / file if file else (mem / "lessons.md")
+        content = read_text(path) if path.exists() else f"{title}\nRule: {l.get('Rule', '')}"
+        cur.execute(
+            "INSERT INTO memory_fts(kind,id,date,tags,title,content,path) VALUES (?,?,?,?,?,?,?)",
+            ("lesson", lid, date, tags, title, content, str(path)),
+        )
+
+    for e in journal:
+        tags = " ".join(e.get("Tags") or [])
+        files = e.get("Files") or []
+        if isinstance(files, dict):
+            files = []
+        content = f"{e.get('Title', '')}\nFiles: {', '.join(files)}"
+        path = mem / "journal" / (e.get("MonthFile") or "")
+        cur.execute(
+            "INSERT INTO memory_fts(kind,id,date,tags,title,content,path) VALUES (?,?,?,?,?,?,?)",
+            ("journal", None, e.get("Date"), tags, e.get("Title"), content, str(path)),
+        )
+
+    digests = mem / "digests"
+    if digests.exists():
+        for p in digests.glob("*.digest.md"):
+            cur.execute(
+                "INSERT INTO memory_fts(kind,id,date,tags,title,content,path) VALUES (?,?,?,?,?,?,?)",
+                ("digest", None, None, "", p.name, read_text(p), str(p)),
+            )
+
+    con.commit()
+    con.close()
+    print(f"Built: {out_db}")
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
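The resulting memory.sqlite can be queried with ordinary FTS5 MATCH syntax. A minimal sketch, separate from the packaged query-memory-sqlite.py; the database path and search term are placeholders.

```python
import sqlite3

# Placeholder path: wherever build-memory-sqlite.py wrote the index.
con = sqlite3.connect(".mnemo/memory/memory.sqlite")
con.row_factory = sqlite3.Row

rows = con.execute(
    """
    SELECT kind, id, date, title, path
    FROM memory_fts
    WHERE memory_fts MATCH ?
    ORDER BY rank
    LIMIT 5
    """,
    ("migration",),  # placeholder search term
).fetchall()

for r in rows:
    print(f"[{r['kind']}] {r['title']} ({r['path']})")

con.close()
```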
package/scripts/memory/installer/templates/clear-active.ps1
@@ -0,0 +1,55 @@
+<#
+clear-active.ps1
+Resets active-context.md to blank template.
+#>
+
+Set-StrictMode -Version Latest
+$ErrorActionPreference = "Stop"
+
+if ($PSScriptRoot) {
+    $RepoRoot = (Resolve-Path (Join-Path $PSScriptRoot "..\..")).Path
+} else {
+    $RepoRoot = (Get-Location).Path
+}
+
+function Resolve-MnemoMemoryDir([string]$Root) {
+    $candidates = @(
+        (Join-Path $Root ".mnemo\memory"),
+        (Join-Path $Root ".cursor\memory")
+    )
+    foreach ($candidate in $candidates) {
+        if (Test-Path -LiteralPath $candidate) { return $candidate }
+    }
+    return $candidates[0]
+}
+
+$ActivePath = Join-Path (Resolve-MnemoMemoryDir -Root $RepoRoot) "active-context.md"
+
+$Template = @"
+# Active Context (Session Scratchpad)
+
+Priority: this overrides older journal history *for this session only*.
+
+CLEAR this file when the task is done:
+- Run ``scripts/memory/clear-active.ps1``
+
+## Current Goal
+-
+
+## Files in Focus
+-
+
+## Findings / Decisions
+-
+
+## Temporary Constraints
+-
+
+## Blockers
+-
+"@
+
+$enc = New-Object System.Text.UTF8Encoding($false)
+[System.IO.File]::WriteAllText($ActivePath, ($Template -replace "`r?`n", "`r`n"), $enc)
+
+Write-Host "Cleared: $ActivePath" -ForegroundColor Green
package/scripts/memory/installer/templates/customization.md
@@ -0,0 +1,84 @@
+# Mnemo Memory Customization Prompt (paste into an AI)
+
+You are an AI coding agent. Your task is to **customize the Mnemo memory system** created by running `memory.ps1` in the root of THIS repository.
+
+## Non-negotiable rules
+
+- **Do not lose legacy memory.** If you find an older memory system (e.g. `Archive/`, `.cursor_old/`, `docs/memory/`, etc.), copy it into:
+  - `.cursor/memory/legacy/<source-name>/`
+- **Do not overwrite** the new Mnemo structure unless explicitly required. Prefer merge + preserve.
+- Keep the always-read layer token-safe:
+  - `.cursor/memory/hot-rules.md` stays ~20 lines (hard invariants only).
+  - `.cursor/memory/memo.md` is "current truth", not history (move history into journals).
+- Mnemo authority order (highest → lowest):
+  - Lessons > active-context > memo > journal.
+
+## Deliverable (what you must produce)
+
+1) Project-customized memory in `.cursor/memory/` (memo + index + regression checklist updated).
+2) Legacy memory preserved in `.cursor/memory/legacy/...`.
+3) Lint passes for the memory system.
+
+## Required steps
+
+### 1) Inventory this repo
+
+- Identify the project type, main entrypoints, key modules, build/test commands, and "hot" folders.
+
+### 2) Update `.cursor/memory/memo.md` (project truth)
+
+Fill it with high-signal bullets:
+- Ownership map (which folder/module owns what)
+- Invariants/constraints (forbidden APIs, timing constraints, state ownership rules)
+- Load order requirements (if relevant)
+- Integration points (plugins, external systems)
+
+Keep it short; no journaling here.
+
+### 3) Update `.cursor/memory/index.md` (orientation)
+
+- Add a "Hotspots" section listing the most bug-prone or most-edited files/dirs.
+- Add a short "If you only remember one thing" section (max 3 bullets).
+
+### 4) Update `.cursor/memory/regression-checklist.md`
+
+Make it match this repo's reality:
+- Build/test commands
+- Runtime/manual checks
+- Areas that commonly regress
+
+### 5) Import legacy journals
+
+- Merge/copy legacy monthly journals into `.cursor/memory/journal/YYYY-MM.md`
+- Ensure each date header appears once per month (`## YYYY-MM-DD`)
+
+### 6) Convert legacy lessons into atomic lessons (if needed)
+
+If legacy has a single `lessons.md`:
+- Convert it into individual files:
+  - `.cursor/memory/lessons/L-001-*.md`, `L-002-*.md`, ...
+- Each lesson must have valid YAML frontmatter required by the linter.
+- If you introduce new tags, add them to `.cursor/memory/tag-vocabulary.md`.
+
+### 7) Rules cleanup (recommended)
+
+If `.cursor/rules/` contains duplicated always-apply rules:
+- Merge into a single rule file (keep it readable with headings).
+- Remove duplicates to avoid conflicting instructions.
+
+### 8) Rebuild indexes + lint
+
+Run:
+
+```powershell
+powershell -ExecutionPolicy Bypass -File scripts/memory/rebuild-memory-index.ps1
+powershell -ExecutionPolicy Bypass -File scripts/memory/lint-memory.ps1
+```
+
+Fix any lint errors you introduced.
+
+## Final response format
+
+- What you changed (files + why)
+- Where legacy memory is preserved
+- Lint result (pass/fail + any warnings)
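Step 5 of the prompt above asks that each `## YYYY-MM-DD` header appear only once per monthly journal. A small illustrative check one could run for that, assuming the journal layout the prompt describes (`.cursor/memory/journal/YYYY-MM.md`); the script itself is not part of the package.

```python
import re
from collections import Counter
from pathlib import Path

# Journal location per the prompt's conventions (or .mnemo/memory/journal).
journal_dir = Path(".cursor/memory/journal")

for month_file in sorted(journal_dir.glob("*.md")):
    text = month_file.read_text(encoding="utf-8")
    # Collect every "## YYYY-MM-DD" date header in the monthly file.
    dates = re.findall(r"^## (\d{4}-\d{2}-\d{2})\s*$", text, re.MULTILINE)
    dupes = [d for d, n in Counter(dates).items() if n > 1]
    if dupes:
        print(f"{month_file.name}: duplicate date headers {dupes}")
```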