@dinasor/mnemo-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +46 -0
- package/LICENSE +21 -0
- package/README.md +263 -0
- package/VERSION +1 -0
- package/bin/mnemo.js +139 -0
- package/memory.ps1 +178 -0
- package/memory_mac.sh +2447 -0
- package/package.json +36 -0
- package/scripts/memory/installer/bootstrap.ps1 +21 -0
- package/scripts/memory/installer/core/bridge.ps1 +285 -0
- package/scripts/memory/installer/core/io.ps1 +110 -0
- package/scripts/memory/installer/core/paths.ps1 +83 -0
- package/scripts/memory/installer/features/gitignore_setup.ps1 +80 -0
- package/scripts/memory/installer/features/hooks_setup.ps1 +157 -0
- package/scripts/memory/installer/features/mcp_setup.ps1 +87 -0
- package/scripts/memory/installer/features/memory_scaffold.ps1 +541 -0
- package/scripts/memory/installer/features/vector_setup.ps1 +103 -0
- package/scripts/memory/installer/templates/add-journal-entry.ps1 +122 -0
- package/scripts/memory/installer/templates/add-lesson.ps1 +151 -0
- package/scripts/memory/installer/templates/autonomy/__init__.py +6 -0
- package/scripts/memory/installer/templates/autonomy/context_safety.py +181 -0
- package/scripts/memory/installer/templates/autonomy/entity_resolver.py +215 -0
- package/scripts/memory/installer/templates/autonomy/ingest_pipeline.py +252 -0
- package/scripts/memory/installer/templates/autonomy/lifecycle_engine.py +254 -0
- package/scripts/memory/installer/templates/autonomy/policies.yaml +59 -0
- package/scripts/memory/installer/templates/autonomy/reranker.py +220 -0
- package/scripts/memory/installer/templates/autonomy/retrieval_router.py +148 -0
- package/scripts/memory/installer/templates/autonomy/runner.py +272 -0
- package/scripts/memory/installer/templates/autonomy/schema.py +150 -0
- package/scripts/memory/installer/templates/autonomy/vault_policy.py +205 -0
- package/scripts/memory/installer/templates/build-memory-sqlite.py +111 -0
- package/scripts/memory/installer/templates/clear-active.ps1 +55 -0
- package/scripts/memory/installer/templates/customization.md +84 -0
- package/scripts/memory/installer/templates/lint-memory.ps1 +217 -0
- package/scripts/memory/installer/templates/mnemo_vector.py +556 -0
- package/scripts/memory/installer/templates/query-memory-sqlite.py +95 -0
- package/scripts/memory/installer/templates/query-memory.ps1 +122 -0
- package/scripts/memory/installer/templates/rebuild-memory-index.ps1 +293 -0
package/memory_mac.sh
ADDED
|
@@ -0,0 +1,2447 @@
|
|
|
1
|
+
#!/bin/sh
# Mnemo memory installer (macOS / POSIX shell)
# Zero extra requirements on macOS: uses /bin/sh + standard Unix tools.
#
# Usage (from repo root, macOS Terminal):
#   sh ./memory_mac.sh
#   sh ./memory_mac.sh --project-name "MyProject"
#   sh ./memory_mac.sh --force
#   sh ./memory_mac.sh --dry-run
#   sh ./memory_mac.sh --enable-vector
#   sh ./memory_mac.sh --enable-vector --vector-provider gemini
#
# This creates:
#   .cursor/memory/*, .cursor/rules/*, scripts/memory/*, .githooks/pre-commit
#   (and optional .githooks/post-commit when --enable-vector is used)

set -eu

REPO_ROOT="$(pwd)"
PROJECT_NAME=""
FORCE="0"
DRY_RUN="0"
ENABLE_VECTOR="0"
VECTOR_PROVIDER="openai"

# require_arg <option> <argc>
# Abort with a readable message when an option that takes a value is the last
# word on the command line (a bare "$2" under `set -u` dies with a cryptic
# "parameter not set" error instead).
require_arg() {
  if [ "$2" -lt 2 ]; then
    echo "Missing value for $1" >&2
    exit 2
  fi
}

while [ $# -gt 0 ]; do
  case "$1" in
    --repo-root)
      require_arg "$1" $#
      REPO_ROOT="$2"; shift 2;;
    --project-name)
      require_arg "$1" $#
      PROJECT_NAME="$2"; shift 2;;
    --force)
      FORCE="1"; shift 1;;
    --dry-run)
      DRY_RUN="1"; shift 1;;
    --enable-vector)
      ENABLE_VECTOR="1"; shift 1;;
    --vector-provider)
      require_arg "$1" $#
      VECTOR_PROVIDER="$2"; shift 2;;
    -h|--help)
      echo "Usage: sh ./memory_mac.sh [--repo-root PATH] [--project-name NAME] [--force] [--dry-run] [--enable-vector] [--vector-provider openai|gemini]"
      exit 0;;
    *)
      echo "Unknown arg: $1" >&2
      exit 2;;
  esac
done

if [ "$DRY_RUN" = "1" ]; then
  echo "[DRY RUN] No files will be written. Showing what would happen."
fi

if [ "$VECTOR_PROVIDER" != "openai" ] && [ "$VECTOR_PROVIDER" != "gemini" ]; then
  echo "Invalid --vector-provider: $VECTOR_PROVIDER (expected openai or gemini)" >&2
  exit 2
fi

# Default the project name to the repo directory name.
if [ -z "$PROJECT_NAME" ]; then
  PROJECT_NAME="$(basename "$REPO_ROOT")"
fi

# Read version from VERSION file at installer location (single source of truth)
_INSTALLER_DIR="$(cd "$(dirname "$0")" && pwd)"
MNEMO_VERSION="0.0.0"
if [ -f "$_INSTALLER_DIR/VERSION" ]; then
  MNEMO_VERSION="$(tr -d '[:space:]' < "$_INSTALLER_DIR/VERSION")"
fi

MONTH="$(date +%Y-%m)"
TODAY="$(date +%Y-%m-%d)"

# Canonical Mnemo identity root
MNEMO_DIR="$REPO_ROOT/.mnemo"
MNEMO_MEMORY_DIR="$MNEMO_DIR/memory"
MNEMO_RULES_DIR="$MNEMO_DIR/rules"
MNEMO_RULES_CURSOR_DIR="$MNEMO_RULES_DIR/cursor"
MNEMO_RULES_AGENT_DIR="$MNEMO_RULES_DIR/agent"
MNEMO_MCP_DIR="$MNEMO_DIR/mcp"
MNEMO_CURSOR_MCP_PATH="$MNEMO_MCP_DIR/cursor.mcp.json"

# IDE integration bridge targets
CURSOR_DIR="$REPO_ROOT/.cursor"
CURSOR_MEMORY_BRIDGE="$CURSOR_DIR/memory"
CURSOR_RULES_BRIDGE="$CURSOR_DIR/rules"
CURSOR_MCP_BRIDGE="$CURSOR_DIR/mcp.json"
AGENT_DIR="$REPO_ROOT/.agent"
AGENT_RULES_BRIDGE="$AGENT_DIR/rules"

# Backward-compatible aliases used by script body (now canonicalized to .mnemo)
MEMORY_DIR="$MNEMO_MEMORY_DIR"
RULES_DIR="$MNEMO_RULES_CURSOR_DIR"
JOURNAL_DIR="$MEMORY_DIR/journal"
DIGESTS_DIR="$MEMORY_DIR/digests"
ADR_DIR="$MEMORY_DIR/adr"
LESSONS_DIR="$MEMORY_DIR/lessons"
TEMPLATES_DIR="$MEMORY_DIR/templates"
SCRIPTS_DIR="$REPO_ROOT/scripts"
MEM_SCRIPTS_DIR="$SCRIPTS_DIR/memory"
GITHOOKS_DIR="$REPO_ROOT/.githooks"
AGENT_RULES_DIR="$MNEMO_RULES_AGENT_DIR"

mkdir -p "$MNEMO_DIR" "$MNEMO_MEMORY_DIR" "$MNEMO_RULES_DIR" "$MNEMO_RULES_CURSOR_DIR" "$MNEMO_RULES_AGENT_DIR" "$MNEMO_MCP_DIR" \
  "$CURSOR_DIR" "$AGENT_DIR" \
  "$MEMORY_DIR" "$RULES_DIR" "$JOURNAL_DIR" "$DIGESTS_DIR" "$ADR_DIR" "$LESSONS_DIR" "$TEMPLATES_DIR" \
  "$SCRIPTS_DIR" "$MEM_SCRIPTS_DIR" "$GITHOOKS_DIR"
|
|
106
|
+
|
|
107
|
+
write_file() {
  # write_file <path>  (file content supplied on stdin)
  #
  # Atomically writes stdin to <path> via a same-directory temp file + mv.
  # Honors the globals FORCE (overwrite an existing file) and DRY_RUN
  # (report only). Always drains stdin so the feeding heredoc/pipe never
  # sees a broken pipe.
  path="$1"
  if [ -f "$path" ] && [ "$FORCE" != "1" ]; then
    printf '%s\n' "SKIP (exists): $path"
    # Still consume stdin to avoid broken pipe
    cat > /dev/null
    return 0
  fi
  if [ "$DRY_RUN" = "1" ]; then
    printf '%s\n' "[DRY RUN] WOULD WRITE: $path"
    cat > /dev/null
    return 0
  fi
  dir="$(dirname "$path")"
  [ -d "$dir" ] || mkdir -p "$dir"
  tmp="${path}.tmp.$$"
  # Remove the temp file if the write or the rename fails; otherwise `set -e`
  # aborts the script and leaves a stray *.tmp.PID file behind.
  if ! cat > "$tmp" || ! mv "$tmp" "$path"; then
    rm -f "$tmp"
    return 1
  fi
  printf '%s\n' "WROTE: $path"
}
|
|
128
|
+
|
|
129
|
+
sync_dir_one_way() {
  # sync_dir_one_way <src> <dst>
  #
  # Non-destructive one-way copy: files from <src> are added to <dst>, but
  # files already present in <dst> are never overwritten. A missing <src> is
  # a no-op; copy errors are tolerated (best-effort sync).
  src="$1"
  dst="$2"
  [ -d "$src" ] || return 0
  mkdir -p "$dst"
  if [ "$DRY_RUN" = "1" ]; then
    printf '%s\n' "[DRY RUN] WOULD SYNC DIR: $src -> $dst"
    return 0
  fi
  if command -v rsync >/dev/null 2>&1; then
    rsync -a --ignore-existing "$src/" "$dst/" >/dev/null 2>&1 || true
  else
    # -n (no-clobber) matches rsync's --ignore-existing semantics; the
    # previous plain `cp -R` silently overwrote existing destination files,
    # breaking the "no destructive removal" mirror guarantee.
    cp -Rn "$src/." "$dst/" 2>/dev/null || true
  fi
}
|
|
144
|
+
|
|
145
|
+
ensure_dir_bridge() {
  # ensure_dir_bridge <canonical-dir> <bridge-path>
  #
  # Make the bridge path expose the canonical directory. Resolution order:
  #   1. correct symlink already in place  -> report, done
  #   2. symlink with wrong target        -> repair (re-point it)
  #   3. real directory in the way        -> two-way non-destructive mirror
  #   4. nothing in the way               -> create a symlink
  #   5. symlink creation refused         -> fall back to a mirrored copy
  # DRY_RUN reports the action it would take instead of performing it.
  target_dir="$1"
  bridge_path="$2"
  parent_dir="$(dirname "$bridge_path")"
  mkdir -p "$target_dir" "$parent_dir"

  if [ -L "$bridge_path" ]; then
    link_dest="$(readlink "$bridge_path" 2>/dev/null || true)"
    if [ "$link_dest" = "$target_dir" ]; then
      printf '%s\n' "BRIDGE (linked): $bridge_path -> $target_dir"
      return 0
    fi
    if [ "$DRY_RUN" = "1" ]; then
      printf '%s\n' "[DRY RUN] WOULD REPAIR BRIDGE: $bridge_path -> $target_dir"
      return 0
    fi
    rm -f "$bridge_path"
  fi

  if [ -e "$bridge_path" ] && [ ! -L "$bridge_path" ]; then
    # Existing real directory: keep permanent mirror mode (no destructive removal).
    sync_dir_one_way "$bridge_path" "$target_dir"
    sync_dir_one_way "$target_dir" "$bridge_path"
    printf '%s\n' "BRIDGE (mirror): $bridge_path <-> $target_dir"
    return 0
  fi

  if [ "$DRY_RUN" = "1" ]; then
    printf '%s\n' "[DRY RUN] WOULD CREATE SYMLINK: $bridge_path -> $target_dir"
    return 0
  fi

  if ln -s "$target_dir" "$bridge_path" 2>/dev/null; then
    printf '%s\n' "BRIDGE (symlink): $bridge_path -> $target_dir"
    return 0
  fi

  # Filesystem rejected the symlink: degrade to a mirrored real directory.
  mkdir -p "$bridge_path"
  sync_dir_one_way "$target_dir" "$bridge_path"
  printf '%s\n' "BRIDGE (mirror): $bridge_path <-> $target_dir"
}
|
|
186
|
+
|
|
187
|
+
ensure_file_bridge() {
  # ensure_file_bridge <canonical-file> <bridge-path>
  #
  # Make the bridge path expose the canonical file. When only the bridge-side
  # copy exists, it is adopted as the canonical file first; when neither side
  # exists this is a no-op. A real file at the bridge path is kept in mirror
  # mode (newer content wins), otherwise a symlink is created, with a plain
  # copy as the last resort. DRY_RUN reports instead of acting.
  src_file="$1"
  link_path="$2"
  link_parent="$(dirname "$link_path")"
  src_parent="$(dirname "$src_file")"
  mkdir -p "$link_parent" "$src_parent"

  # Seed the canonical side from the bridge when only the bridge exists.
  if [ ! -f "$src_file" ] && [ -f "$link_path" ]; then
    if [ "$DRY_RUN" = "1" ]; then
      printf '%s\n' "[DRY RUN] WOULD COPY FILE: $link_path -> $src_file"
    else
      cp "$link_path" "$src_file"
    fi
  fi
  [ -f "$src_file" ] || return 0

  if [ -L "$link_path" ]; then
    link_dest="$(readlink "$link_path" 2>/dev/null || true)"
    if [ "$link_dest" = "$src_file" ]; then
      printf '%s\n' "BRIDGE (linked): $link_path -> $src_file"
      return 0
    fi
    if [ "$DRY_RUN" = "1" ]; then
      printf '%s\n' "[DRY RUN] WOULD REPAIR FILE BRIDGE: $link_path -> $src_file"
      return 0
    fi
    rm -f "$link_path"
  fi

  if [ -e "$link_path" ] && [ ! -L "$link_path" ]; then
    # Regular file at the bridge path: sync both directions, newest wins.
    if [ "$DRY_RUN" = "1" ]; then
      printf '%s\n' "[DRY RUN] WOULD MIRROR FILE: $src_file <-> $link_path"
      return 0
    fi
    if [ "$link_path" -nt "$src_file" ]; then
      cp "$link_path" "$src_file"
    fi
    cp "$src_file" "$link_path"
    printf '%s\n' "BRIDGE (mirror): $link_path <-> $src_file"
    return 0
  fi

  if [ "$DRY_RUN" = "1" ]; then
    printf '%s\n' "[DRY RUN] WOULD CREATE FILE SYMLINK: $link_path -> $src_file"
    return 0
  fi
  if ln -s "$src_file" "$link_path" 2>/dev/null; then
    printf '%s\n' "BRIDGE (symlink): $link_path -> $src_file"
    return 0
  fi
  # Symlink refused: degrade to a plain copy.
  cp "$src_file" "$link_path"
  printf '%s\n' "BRIDGE (mirror): $link_path <-> $src_file"
}
|
|
240
|
+
|
|
241
|
+
ensure_mnemo_bridges() {
  # Wire every IDE-facing bridge location to the canonical .mnemo tree:
  # Cursor memory, Cursor rules, and agent rules directories.
  ensure_dir_bridge "$MNEMO_MEMORY_DIR" "$CURSOR_MEMORY_BRIDGE"
  ensure_dir_bridge "$MNEMO_RULES_CURSOR_DIR" "$CURSOR_RULES_BRIDGE"
  ensure_dir_bridge "$MNEMO_RULES_AGENT_DIR" "$AGENT_RULES_BRIDGE"
  # The MCP config is only bridged when at least one side already exists,
  # so a repo without MCP setup never grows an empty cursor mcp.json.
  if [ -f "$MNEMO_CURSOR_MCP_PATH" ] || [ -f "$CURSOR_MCP_BRIDGE" ]; then
    ensure_file_bridge "$MNEMO_CURSOR_MCP_PATH" "$CURSOR_MCP_BRIDGE"
  fi
}
|
|
249
|
+
|
|
250
|
+
# Migrate legacy paths into canonical .mnemo before generating files.
# A real (non-symlink) directory at a bridge location indicates a pre-.mnemo
# installation; pull its contents into the canonical tree non-destructively.
migrate_legacy_dir() {
  # migrate_legacy_dir <bridge-path> <canonical-dir>
  if [ -d "$1" ] && [ ! -L "$1" ]; then
    sync_dir_one_way "$1" "$2"
  fi
}
if [ "$DRY_RUN" != "1" ]; then
  migrate_legacy_dir "$CURSOR_MEMORY_BRIDGE" "$MNEMO_MEMORY_DIR"
  migrate_legacy_dir "$CURSOR_RULES_BRIDGE" "$MNEMO_RULES_CURSOR_DIR"
  migrate_legacy_dir "$AGENT_RULES_BRIDGE" "$MNEMO_RULES_AGENT_DIR"
fi
|
|
262
|
+
|
|
263
|
+
# -------------------------
|
|
264
|
+
# Memory files
|
|
265
|
+
# -------------------------
|
|
266
|
+
|
|
267
|
+
write_file "$MEMORY_DIR/index.md" <<'EOF'
|
|
268
|
+
# Memory Index
|
|
269
|
+
|
|
270
|
+
Entry point for repo memory.
|
|
271
|
+
|
|
272
|
+
## Read order (token-safe)
|
|
273
|
+
|
|
274
|
+
ALWAYS READ (in order):
|
|
275
|
+
1) `hot-rules.md` (tiny invariants, <20 lines)
|
|
276
|
+
2) `active-context.md` (this session only)
|
|
277
|
+
3) `memo.md` (long-term current truth + ownership)
|
|
278
|
+
|
|
279
|
+
SEARCH FIRST, THEN OPEN ONLY WHAT MATCHES:
|
|
280
|
+
4) `lessons/index.md` -> find lesson ID(s)
|
|
281
|
+
5) `lessons/L-XXX-*.md` -> open only specific lesson(s)
|
|
282
|
+
6) `digests/YYYY-MM.digest.md` -> before raw journal
|
|
283
|
+
7) `journal/YYYY-MM.md` -> only for archaeology
|
|
284
|
+
|
|
285
|
+
## Maintenance commands
|
|
286
|
+
|
|
287
|
+
Shell helper scripts (macOS-friendly):
|
|
288
|
+
- Add lesson: `scripts/memory/add-lesson.sh --title "..." --tags "..." --rule "..."`
|
|
289
|
+
- Add journal: `scripts/memory/add-journal-entry.sh --tags "..." --title "..."`
|
|
290
|
+
- Rebuild indexes: `scripts/memory/rebuild-memory-index.sh`
|
|
291
|
+
- Lint: `scripts/memory/lint-memory.sh`
|
|
292
|
+
- Query: `scripts/memory/query-memory.sh --query "..."`
|
|
293
|
+
- Clear session: `scripts/memory/clear-active.sh`
|
|
294
|
+
EOF
|
|
295
|
+
|
|
296
|
+
write_file "$MEMORY_DIR/hot-rules.md" <<'EOF'
|
|
297
|
+
# Hot Rules (MUST READ)
|
|
298
|
+
|
|
299
|
+
Keep this file under ~20 lines. If it grows, move content into memo or lessons.
|
|
300
|
+
|
|
301
|
+
## Authority Order (highest to lowest)
|
|
302
|
+
1) Lessons override EVERYTHING (including active-context)
|
|
303
|
+
2) `active-context.md` overrides memo/journal (but NOT lessons)
|
|
304
|
+
3) `memo.md` is long-term project truth
|
|
305
|
+
4) journal is history
|
|
306
|
+
|
|
307
|
+
## Retrieval Rules
|
|
308
|
+
5) Do NOT scan raw journals. Use indexes/digests first.
|
|
309
|
+
6) Reuse existing patterns. Check memo.md ownership before creating new systems.
|
|
310
|
+
7) When done: clear active-context.md, add journal entry if significant.
|
|
311
|
+
EOF
|
|
312
|
+
|
|
313
|
+
write_file "$MEMORY_DIR/active-context.md" <<'EOF'
|
|
314
|
+
# Active Context (Session Scratchpad)
|
|
315
|
+
|
|
316
|
+
Priority: this overrides older journal history *for this session only*.
|
|
317
|
+
|
|
318
|
+
CLEAR this file when the task is done:
|
|
319
|
+
- Run `scripts/memory/clear-active.sh`
|
|
320
|
+
|
|
321
|
+
## Current Goal
|
|
322
|
+
-
|
|
323
|
+
|
|
324
|
+
## Files in Focus
|
|
325
|
+
-
|
|
326
|
+
|
|
327
|
+
## Findings / Decisions
|
|
328
|
+
-
|
|
329
|
+
|
|
330
|
+
## Temporary Constraints
|
|
331
|
+
-
|
|
332
|
+
|
|
333
|
+
## Blockers
|
|
334
|
+
-
|
|
335
|
+
EOF
|
|
336
|
+
|
|
337
|
+
write_file "$MEMORY_DIR/memo.md" <<EOF
|
|
338
|
+
# Project Memo - $PROJECT_NAME
|
|
339
|
+
|
|
340
|
+
Last updated: $TODAY
|
|
341
|
+
|
|
342
|
+
## Ownership / Modules
|
|
343
|
+
- TODO
|
|
344
|
+
|
|
345
|
+
## Invariants
|
|
346
|
+
- TODO
|
|
347
|
+
|
|
348
|
+
## Build / Run
|
|
349
|
+
- TODO
|
|
350
|
+
|
|
351
|
+
## Integration Points
|
|
352
|
+
- TODO
|
|
353
|
+
EOF
|
|
354
|
+
|
|
355
|
+
write_file "$LESSONS_DIR/README.md" <<'EOF'
|
|
356
|
+
# Lessons
|
|
357
|
+
|
|
358
|
+
Lessons are atomic “rules learned the hard way”.
|
|
359
|
+
|
|
360
|
+
Rules:
|
|
361
|
+
- One lesson per file: `L-XXX-title.md`
|
|
362
|
+
- Must include YAML frontmatter at the top (`---` … `---`)
|
|
363
|
+
- Keep lessons high-signal and reusable
|
|
364
|
+
EOF
|
|
365
|
+
|
|
366
|
+
write_file "$LESSONS_DIR/index.md" <<'EOF'
|
|
367
|
+
# Lessons Index (generated)
|
|
368
|
+
|
|
369
|
+
Generated by `scripts/memory/rebuild-memory-index.sh`.
|
|
370
|
+
|
|
371
|
+
Format: ID | [Tags] | AppliesTo | Rule | File
|
|
372
|
+
|
|
373
|
+
(No lessons yet.)
|
|
374
|
+
EOF
|
|
375
|
+
|
|
376
|
+
write_file "$JOURNAL_DIR/README.md" <<'EOF'
|
|
377
|
+
# Journal
|
|
378
|
+
|
|
379
|
+
Monthly file: `YYYY-MM.md`
|
|
380
|
+
|
|
381
|
+
Rules:
|
|
382
|
+
- Each date appears ONCE per file: `## YYYY-MM-DD`
|
|
383
|
+
- Put multiple entries under that header as bullets.
|
|
384
|
+
- Keep it high-signal: what changed, why, key files.
|
|
385
|
+
EOF
|
|
386
|
+
|
|
387
|
+
write_file "$JOURNAL_DIR/$MONTH.md" <<EOF
|
|
388
|
+
# Development Journal - $PROJECT_NAME ($MONTH)
|
|
389
|
+
|
|
390
|
+
## $TODAY
|
|
391
|
+
|
|
392
|
+
- [Process] Initialized memory system (Mnemo v$MNEMO_VERSION)
|
|
393
|
+
- Why: token-safe AI memory + indexed retrieval + portable hooks
|
|
394
|
+
- Key files:
|
|
395
|
+
- \`.cursor/memory/*\`
|
|
396
|
+
- \`.cursor/rules/00-memory-system.mdc\`
|
|
397
|
+
- \`scripts/memory/*\`
|
|
398
|
+
EOF
|
|
399
|
+
|
|
400
|
+
write_file "$DIGESTS_DIR/README.md" <<'EOF'
|
|
401
|
+
# Digests
|
|
402
|
+
|
|
403
|
+
Generated summaries of journal months.
|
|
404
|
+
AI should read digests before raw journal.
|
|
405
|
+
EOF
|
|
406
|
+
|
|
407
|
+
write_file "$ADR_DIR/README.md" <<'EOF'
|
|
408
|
+
# ADRs
|
|
409
|
+
|
|
410
|
+
Architecture Decision Records: why we did it this way.
|
|
411
|
+
|
|
412
|
+
Naming:
|
|
413
|
+
- `ADR-001-short-title.md`
|
|
414
|
+
EOF
|
|
415
|
+
|
|
416
|
+
write_file "$MEMORY_DIR/tag-vocabulary.md" <<'EOF'
|
|
417
|
+
# Tag Vocabulary (fixed set)
|
|
418
|
+
|
|
419
|
+
Use a small vocabulary so retrieval stays reliable.
|
|
420
|
+
|
|
421
|
+
- [UI] - UI behavior, rendering, interaction
|
|
422
|
+
- [Layout] - layout groups, anchors, sizing, rects
|
|
423
|
+
- [Input] - mouse/keyboard/controller input rules
|
|
424
|
+
- [Data] - parsing, payloads, formats, state sync
|
|
425
|
+
- [Server] - server-side logic and lifecycle
|
|
426
|
+
- [Init] - initialization / load order / startup
|
|
427
|
+
- [Build] - compilation, project files
|
|
428
|
+
- [CI] - automation, pipelines
|
|
429
|
+
- [Release] - packaging, artifacts, uploads
|
|
430
|
+
- [Compat] - runtime constraints, environment quirks
|
|
431
|
+
- [Integration] - plugins, external systems
|
|
432
|
+
- [Docs] - documentation and changelog work
|
|
433
|
+
- [Architecture] - module boundaries, refactors, ownership
|
|
434
|
+
- [DX] - developer experience, tooling, maintainability
|
|
435
|
+
- [Reliability] - crash prevention, guardrails, self-healing
|
|
436
|
+
- [Process] - workflow, memory system, tooling changes
|
|
437
|
+
|
|
438
|
+
# Common "type" tags (templates/examples)
|
|
439
|
+
- [Fix] - bug fixes, regressions, patches
|
|
440
|
+
- [Feature] - new behavior/capability
|
|
441
|
+
- [Refactor] - restructuring without behavior changes
|
|
442
|
+
EOF
|
|
443
|
+
|
|
444
|
+
write_file "$MEMORY_DIR/regression-checklist.md" <<'EOF'
|
|
445
|
+
# Regression Checklist
|
|
446
|
+
|
|
447
|
+
Run only what is relevant.
|
|
448
|
+
|
|
449
|
+
## Build
|
|
450
|
+
- [ ] Build / run relevant commands
|
|
451
|
+
- [ ] No new warnings (or documented)
|
|
452
|
+
|
|
453
|
+
## Runtime (if applicable)
|
|
454
|
+
- [ ] Core UI renders
|
|
455
|
+
- [ ] Core interactions work
|
|
456
|
+
- [ ] No obvious errors/log spam
|
|
457
|
+
|
|
458
|
+
## Docs (if applicable)
|
|
459
|
+
- [ ] Journal updated
|
|
460
|
+
- [ ] Memo updated (if truth changed)
|
|
461
|
+
- [ ] Lesson added (if pitfall discovered)
|
|
462
|
+
EOF
|
|
463
|
+
|
|
464
|
+
write_file "$TEMPLATES_DIR/lesson.template.md" <<'EOF'
|
|
465
|
+
---
|
|
466
|
+
id: L-XXX
|
|
467
|
+
title: Short descriptive title
|
|
468
|
+
status: Active
|
|
469
|
+
tags: [UI, Reliability]
|
|
470
|
+
introduced: YYYY-MM-DD
|
|
471
|
+
applies_to:
|
|
472
|
+
- path/or/glob/**
|
|
473
|
+
triggers:
|
|
474
|
+
- error keyword
|
|
475
|
+
rule: One sentence. Imperative. Testable.
|
|
476
|
+
supersedes: ""
|
|
477
|
+
---
|
|
478
|
+
|
|
479
|
+
# L-XXX - Short descriptive title
|
|
480
|
+
|
|
481
|
+
## Symptom
|
|
482
|
+
TODO
|
|
483
|
+
|
|
484
|
+
## Root cause
|
|
485
|
+
TODO
|
|
486
|
+
|
|
487
|
+
## Wrong approach (DO NOT REPEAT)
|
|
488
|
+
- TODO
|
|
489
|
+
|
|
490
|
+
## Correct approach
|
|
491
|
+
- TODO
|
|
492
|
+
EOF
|
|
493
|
+
|
|
494
|
+
write_file "$TEMPLATES_DIR/journal-entry.template.md" <<'EOF'
|
|
495
|
+
# Journal Entry Template (paste under an existing date header)
|
|
496
|
+
|
|
497
|
+
- [Area][Type] Title
|
|
498
|
+
- Why: ...
|
|
499
|
+
- Key files:
|
|
500
|
+
- `path/to/file`
|
|
501
|
+
- Verification: Build PASS/FAIL/NOT RUN; Runtime PASS/FAIL/NOT RUN
|
|
502
|
+
EOF
|
|
503
|
+
|
|
504
|
+
write_file "$TEMPLATES_DIR/adr.template.md" <<'EOF'
|
|
505
|
+
# ADR-XXX - Title
|
|
506
|
+
|
|
507
|
+
Date: YYYY-MM-DD
|
|
508
|
+
Status: Proposed | Accepted | Deprecated
|
|
509
|
+
|
|
510
|
+
## Context
|
|
511
|
+
TODO
|
|
512
|
+
|
|
513
|
+
## Decision
|
|
514
|
+
TODO
|
|
515
|
+
|
|
516
|
+
## Consequences
|
|
517
|
+
TODO
|
|
518
|
+
EOF
|
|
519
|
+
|
|
520
|
+
# -------------------------
|
|
521
|
+
# Cursor rule (Cursor will pick this up; other agents can still read .cursor/memory)
|
|
522
|
+
# -------------------------
|
|
523
|
+
|
|
524
|
+
write_file "$RULES_DIR/00-memory-system.mdc" <<EOF
|
|
525
|
+
---
|
|
526
|
+
description: Mnemo Memory System v$MNEMO_VERSION - Authority + Atomic Retrieval + Token Safety
|
|
527
|
+
globs:
|
|
528
|
+
- "**/*"
|
|
529
|
+
alwaysApply: true
|
|
530
|
+
---
|
|
531
|
+
|
|
532
|
+
# Memory System (MANDATORY)
|
|
533
|
+
|
|
534
|
+
## Authority Order (highest to lowest)
|
|
535
|
+
1) Lessons override EVERYTHING (including active-context)
|
|
536
|
+
2) active-context.md overrides memo/journal (but NOT lessons)
|
|
537
|
+
3) memo.md is long-term project truth
|
|
538
|
+
4) Journal is history
|
|
539
|
+
|
|
540
|
+
## Token-Safe Retrieval
|
|
541
|
+
|
|
542
|
+
ALWAYS READ (in order):
|
|
543
|
+
1. .cursor/memory/hot-rules.md
|
|
544
|
+
2. .cursor/memory/active-context.md
|
|
545
|
+
3. .cursor/memory/memo.md
|
|
546
|
+
|
|
547
|
+
SEARCH FIRST, THEN FETCH:
|
|
548
|
+
4. .cursor/memory/lessons/index.md -> find relevant lesson ID
|
|
549
|
+
5. .cursor/memory/lessons/L-XXX-title.md -> load ONLY the specific file
|
|
550
|
+
6. .cursor/memory/digests/YYYY-MM.digest.md -> before raw journal
|
|
551
|
+
7. .cursor/memory/journal/YYYY-MM.md -> only for archaeology
|
|
552
|
+
|
|
553
|
+
## Helper Scripts (macOS)
|
|
554
|
+
|
|
555
|
+
- Add lesson: scripts/memory/add-lesson.sh --title "..." --tags "..." --rule "..."
|
|
556
|
+
- Add journal: scripts/memory/add-journal-entry.sh --tags "..." --title "..."
|
|
557
|
+
- Rebuild: scripts/memory/rebuild-memory-index.sh
|
|
558
|
+
- Lint: scripts/memory/lint-memory.sh
|
|
559
|
+
- Query: scripts/memory/query-memory.sh --query "..."
|
|
560
|
+
- Clear: scripts/memory/clear-active.sh
|
|
561
|
+
EOF
|
|
562
|
+
|
|
563
|
+
# -------------------------
|
|
564
|
+
# Multi-agent bridge files
|
|
565
|
+
# -------------------------
|
|
566
|
+
|
|
567
|
+
mkdir -p "$AGENT_RULES_DIR"
|
|
568
|
+
|
|
569
|
+
write_file "$REPO_ROOT/CLAUDE.md" <<'EOF'
|
|
570
|
+
# Project Memory (Mnemo)
|
|
571
|
+
|
|
572
|
+
This project uses Mnemo for structured AI memory.
|
|
573
|
+
Memory lives in `.cursor/memory/` as the single source of truth.
|
|
574
|
+
|
|
575
|
+
## Read Order (ALWAYS)
|
|
576
|
+
1. `.cursor/memory/hot-rules.md` - tiny invariants (<20 lines)
|
|
577
|
+
2. `.cursor/memory/active-context.md` - current session state
|
|
578
|
+
3. `.cursor/memory/memo.md` - long-term project truth + ownership
|
|
579
|
+
|
|
580
|
+
## Search First, Then Fetch
|
|
581
|
+
- `.cursor/memory/lessons/index.md` → find lesson ID → open only that lesson file
|
|
582
|
+
- `.cursor/memory/digests/YYYY-MM.digest.md` → before raw journal archaeology
|
|
583
|
+
- `.cursor/memory/journal/YYYY-MM.md` → only for deep history
|
|
584
|
+
|
|
585
|
+
## After Any Feature/Fix
|
|
586
|
+
1. Update `active-context.md` during work
|
|
587
|
+
2. Add journal entry when done
|
|
588
|
+
3. Create lesson if you discovered a pitfall
|
|
589
|
+
4. Update `memo.md` if project truth changed
|
|
590
|
+
5. Clear `active-context.md` when task is merged
|
|
591
|
+
EOF
|
|
592
|
+
|
|
593
|
+
write_file "$AGENT_RULES_DIR/memory-system.md" <<'EOF'
|
|
594
|
+
---
|
|
595
|
+
description: Mnemo memory system - structured AI memory in .cursor/memory/
|
|
596
|
+
alwaysApply: true
|
|
597
|
+
---
|
|
598
|
+
|
|
599
|
+
# Memory System (Mnemo)
|
|
600
|
+
|
|
601
|
+
This project uses Mnemo for structured AI memory. All memory lives in `.cursor/memory/`.
|
|
602
|
+
|
|
603
|
+
## Read Order (ALWAYS)
|
|
604
|
+
1. `.cursor/memory/hot-rules.md` - tiny invariants (read first)
|
|
605
|
+
2. `.cursor/memory/active-context.md` - current session state
|
|
606
|
+
3. `.cursor/memory/memo.md` - project truth + ownership
|
|
607
|
+
|
|
608
|
+
## Search First, Then Fetch
|
|
609
|
+
- `.cursor/memory/lessons/index.md` - searchable lesson index
|
|
610
|
+
- `.cursor/memory/digests/*.digest.md` - monthly summaries
|
|
611
|
+
- `.cursor/memory/journal/*.md` - raw history (last resort)
|
|
612
|
+
|
|
613
|
+
## Authority Order
|
|
614
|
+
1. Lessons override everything
|
|
615
|
+
2. active-context overrides memo/journal (but NOT lessons)
|
|
616
|
+
3. memo.md is long-term truth
|
|
617
|
+
4. Journal is history
|
|
618
|
+
|
|
619
|
+
## After Any Task
|
|
620
|
+
- Update active-context.md during work
|
|
621
|
+
- Add journal entry when done
|
|
622
|
+
- Create lesson if you discovered a pitfall
|
|
623
|
+
- Clear active-context.md when task is merged
|
|
624
|
+
EOF
|
|
625
|
+
|
|
626
|
+
write_file "$REPO_ROOT/AGENTS.md" <<'EOF'
|
|
627
|
+
# Memory System (Mnemo)
|
|
628
|
+
|
|
629
|
+
This project uses Mnemo for structured AI memory.
|
|
630
|
+
Memory location: `.cursor/memory/`
|
|
631
|
+
|
|
632
|
+
## Retrieval Order
|
|
633
|
+
1. Read `.cursor/memory/hot-rules.md` first (tiny, <20 lines)
|
|
634
|
+
2. Read `.cursor/memory/active-context.md` for current session
|
|
635
|
+
3. Read `.cursor/memory/memo.md` for project truth + ownership
|
|
636
|
+
4. Search `.cursor/memory/lessons/index.md` before creating new patterns
|
|
637
|
+
5. Check `.cursor/memory/digests/` before raw journal archaeology
|
|
638
|
+
|
|
639
|
+
## Authority Order (highest to lowest)
|
|
640
|
+
1. Lessons override EVERYTHING
|
|
641
|
+
2. active-context.md overrides memo/journal (but NOT lessons)
|
|
642
|
+
3. memo.md is long-term project truth
|
|
643
|
+
4. Journal is history
|
|
644
|
+
5. Existing codebase
|
|
645
|
+
6. New suggestions (lowest priority)
|
|
646
|
+
EOF
|
|
647
|
+
|
|
648
|
+
write_file "$MEM_SCRIPTS_DIR/customization.md" <<'EOF'
|
|
649
|
+
# Mnemo Memory Customization Prompt (paste into an AI)
|
|
650
|
+
|
|
651
|
+
You are an AI coding agent. Your task is to **customize the Mnemo memory system** created by running the installer in the root of THIS repository.
|
|
652
|
+
|
|
653
|
+
## Non-negotiable rules
|
|
654
|
+
|
|
655
|
+
- **Do not lose legacy memory.** If you find an older memory system (e.g. `Archive/`, `.cursor_old/`, `docs/memory/`, etc.), copy it into:
|
|
656
|
+
- `.cursor/memory/legacy/<source-name>/`
|
|
657
|
+
- **Do not overwrite** the Mnemo structure unless explicitly required. Prefer merge + preserve.
|
|
658
|
+
- Keep the always-read layer token-safe:
|
|
659
|
+
- `.cursor/memory/hot-rules.md` stays ~20 lines (hard invariants only).
|
|
660
|
+
- `.cursor/memory/memo.md` is “current truth”, not history (move history into journals).
|
|
661
|
+
- Mnemo authority order (highest → lowest):
|
|
662
|
+
- Lessons > active-context > memo > journal.
|
|
663
|
+
|
|
664
|
+
## Deliverable
|
|
665
|
+
|
|
666
|
+
1) Project-customized memory in `.cursor/memory/` (memo + index + regression checklist updated).
|
|
667
|
+
2) Legacy memory preserved in `.cursor/memory/legacy/...`.
|
|
668
|
+
3) Lint passes for the memory system.
|
|
669
|
+
EOF
|
|
670
|
+
|
|
671
|
+
# -------------------------
|
|
672
|
+
# Helper scripts (shell)
|
|
673
|
+
# -------------------------
|
|
674
|
+
|
|
675
|
+
write_file "$MEM_SCRIPTS_DIR/query-memory.sh" <<'EOF'
|
|
676
|
+
#!/bin/sh
|
|
677
|
+
set -eu
|
|
678
|
+
|
|
679
|
+
QUERY=""
|
|
680
|
+
AREA="All"
|
|
681
|
+
FORMAT="Human"
|
|
682
|
+
USE_SQLITE="0"
|
|
683
|
+
|
|
684
|
+
while [ $# -gt 0 ]; do
|
|
685
|
+
case "$1" in
|
|
686
|
+
--query) QUERY="$2"; shift 2;;
|
|
687
|
+
--area) AREA="$2"; shift 2;;
|
|
688
|
+
--format) FORMAT="$2"; shift 2;;
|
|
689
|
+
--use-sqlite) USE_SQLITE="1"; shift 1;;
|
|
690
|
+
-h|--help)
|
|
691
|
+
echo "Usage: sh ./scripts/memory/query-memory.sh --query \"...\" [--area All|HotRules|Active|Memo|Lessons|Journal|Digests] [--format Human|AI] [--use-sqlite]"
|
|
692
|
+
exit 0;;
|
|
693
|
+
*) echo "Unknown arg: $1" >&2; exit 2;;
|
|
694
|
+
esac
|
|
695
|
+
done
|
|
696
|
+
|
|
697
|
+
if [ -z "$QUERY" ]; then
|
|
698
|
+
echo "Missing --query" >&2
|
|
699
|
+
exit 2
|
|
700
|
+
fi
|
|
701
|
+
|
|
702
|
+
ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
|
|
703
|
+
MEM="$ROOT/.cursor/memory"
|
|
704
|
+
LESSONS="$MEM/lessons"
|
|
705
|
+
|
|
706
|
+
to_lower() { echo "$1" | tr '[:upper:]' '[:lower:]'; }
|
|
707
|
+
|
|
708
|
+
area_l="$(to_lower "$AREA")"
|
|
709
|
+
format_l="$(to_lower "$FORMAT")"
|
|
710
|
+
|
|
711
|
+
if [ "$USE_SQLITE" = "1" ]; then
|
|
712
|
+
if command -v python3 >/dev/null 2>&1 && [ -f "$MEM/memory.sqlite" ] && [ -f "$ROOT/scripts/memory/query-memory-sqlite.py" ]; then
|
|
713
|
+
python3 "$ROOT/scripts/memory/query-memory-sqlite.py" --repo "$ROOT" --q "$QUERY" --area "$AREA" --format "$FORMAT"
|
|
714
|
+
exit $?
|
|
715
|
+
fi
|
|
716
|
+
echo "SQLite mode unavailable (need python3 + .cursor/memory/memory.sqlite + query-memory-sqlite.py). Falling back to file search." >&2
|
|
717
|
+
fi
|
|
718
|
+
|
|
719
|
+
targets=""
|
|
720
|
+
case "$area_l" in
|
|
721
|
+
hotrules|hot) targets="$MEM/hot-rules.md" ;;
|
|
722
|
+
active) targets="$MEM/active-context.md" ;;
|
|
723
|
+
memo) targets="$MEM/memo.md" ;;
|
|
724
|
+
lessons) targets="$LESSONS/index.md $LESSONS/L-*.md" ;;
|
|
725
|
+
journal) targets="$MEM/journal-index.md" ;;
|
|
726
|
+
digests) targets="$MEM/digests/"'*.digest.md' ;;
|
|
727
|
+
all) targets="$MEM/hot-rules.md $MEM/active-context.md $MEM/memo.md $LESSONS/index.md $MEM/journal-index.md $MEM/digests/"'*.digest.md' ;;
|
|
728
|
+
*) echo "Unknown --area: $AREA" >&2; exit 2 ;;
|
|
729
|
+
esac
|
|
730
|
+
|
|
731
|
+
tmp="${TMPDIR:-/tmp}/mnemo-query.$$"
|
|
732
|
+
rm -f "$tmp"
|
|
733
|
+
|
|
734
|
+
for t in $targets; do
|
|
735
|
+
# shellcheck disable=SC2086
|
|
736
|
+
[ -e $t ] || continue
|
|
737
|
+
# shellcheck disable=SC2086
|
|
738
|
+
grep -nH "$QUERY" $t 2>/dev/null >>"$tmp" || true
|
|
739
|
+
done
|
|
740
|
+
|
|
741
|
+
match_count=0
|
|
742
|
+
if [ -f "$tmp" ]; then
|
|
743
|
+
match_count="$(wc -l < "$tmp" | awk '{$1=$1;print}')"
|
|
744
|
+
fi
|
|
745
|
+
|
|
746
|
+
if [ "$format_l" = "ai" ]; then
|
|
747
|
+
if [ "$match_count" -eq 0 ]; then
|
|
748
|
+
echo "No matches found for: $QUERY"
|
|
749
|
+
else
|
|
750
|
+
echo "Files to read:"
|
|
751
|
+
cut -d: -f1 "$tmp" | sort -u | while IFS= read -r f; do
|
|
752
|
+
rel="${f#$ROOT/}"
|
|
753
|
+
echo " @$rel"
|
|
754
|
+
done
|
|
755
|
+
fi
|
|
756
|
+
else
|
|
757
|
+
echo "Searching: $QUERY"
|
|
758
|
+
echo "Area: $AREA"
|
|
759
|
+
echo ""
|
|
760
|
+
if [ "$match_count" -eq 0 ]; then
|
|
761
|
+
echo "No matches found."
|
|
762
|
+
else
|
|
763
|
+
cat "$tmp"
|
|
764
|
+
fi
|
|
765
|
+
fi
|
|
766
|
+
|
|
767
|
+
rm -f "$tmp"
|
|
768
|
+
EOF
|
|
769
|
+
|
|
770
|
+
write_file "$MEM_SCRIPTS_DIR/clear-active.sh" <<'EOF'
|
|
771
|
+
#!/bin/sh
|
|
772
|
+
set -eu
|
|
773
|
+
ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
|
|
774
|
+
ACTIVE="$ROOT/.cursor/memory/active-context.md"
|
|
775
|
+
cat > "$ACTIVE" <<'T'
|
|
776
|
+
# Active Context (Session Scratchpad)
|
|
777
|
+
|
|
778
|
+
Priority: this overrides older journal history *for this session only*.
|
|
779
|
+
|
|
780
|
+
CLEAR this file when the task is done:
|
|
781
|
+
- Run `scripts/memory/clear-active.sh`
|
|
782
|
+
|
|
783
|
+
## Current Goal
|
|
784
|
+
-
|
|
785
|
+
|
|
786
|
+
## Files in Focus
|
|
787
|
+
-
|
|
788
|
+
|
|
789
|
+
## Findings / Decisions
|
|
790
|
+
-
|
|
791
|
+
|
|
792
|
+
## Temporary Constraints
|
|
793
|
+
-
|
|
794
|
+
|
|
795
|
+
## Blockers
|
|
796
|
+
-
|
|
797
|
+
T
|
|
798
|
+
echo "Cleared: $ACTIVE"
|
|
799
|
+
EOF
|
|
800
|
+
|
|
801
|
+
# add-journal-entry.sh: appends a tagged entry to the current month's journal
# file (.cursor/memory/journal/YYYY-MM.md), creating the file or the date
# section as needed. Tags are validated against tag-vocabulary.md.
# Quoted heredoc: written verbatim at install time.
write_file "$MEM_SCRIPTS_DIR/add-journal-entry.sh" <<'EOF'
#!/bin/sh
set -eu

TAGS=""
TITLE=""
FILES=""
WHY=""
DATE="$(date +%Y-%m-%d)"

while [ $# -gt 0 ]; do
  case "$1" in
    --tags) TAGS="$2"; shift 2;;
    --title) TITLE="$2"; shift 2;;
    --files) FILES="$2"; shift 2;;
    --why) WHY="$2"; shift 2;;
    --date) DATE="$2"; shift 2;;
    -h|--help)
      echo "Usage: sh ./scripts/memory/add-journal-entry.sh --tags \"UI,Fix\" --title \"...\" [--files \"a,b\"] [--why \"...\"] [--date YYYY-MM-DD]"
      exit 0;;
    *) echo "Unknown arg: $1" >&2; exit 2;;
  esac
done

if [ -z "$TAGS" ] || [ -z "$TITLE" ]; then
  echo "Missing --tags or --title" >&2
  exit 2
fi

ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
MEM="$ROOT/.cursor/memory"
JOURNAL_DIR="$MEM/journal"
TAG_VOCAB="$MEM/tag-vocabulary.md"
MONTH="$(echo "$DATE" | cut -c1-7)"
JOURNAL="$JOURNAL_DIR/$MONTH.md"
PROJECT_NAME="$(basename "$ROOT")"

mkdir -p "$JOURNAL_DIR"

# Print the canonical casing of a tag from tag-vocabulary.md.
# Prints nothing when the tag is unknown; echoes the input unchanged when
# the vocabulary file does not exist.
canon_tag() {
  want_l="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
  [ -f "$TAG_VOCAB" ] || { echo "$1"; return 0; }
  awk -v w="$want_l" '
  BEGIN { IGNORECASE=1 }
  /^\- \[[^]]+\]/ {
    t=$0
    sub(/^\- \[/,"",t); sub(/\].*$/,"",t)
    if (tolower(t)==w) { print t; exit }
  }
  ' "$TAG_VOCAB" 2>/dev/null || true
}

tag_string=""
oldIFS="$IFS"; IFS=','; set -- $TAGS; IFS="$oldIFS"
for t in "$@"; do
  tt="$(echo "$t" | awk '{$1=$1;print}')"
  [ -z "$tt" ] && continue
  canon="$(canon_tag "$tt")"
  if [ -z "$canon" ]; then
    echo "Unknown tag '$tt'. Add it to tag-vocabulary.md or fix the tag." >&2
    exit 1
  fi
  tag_string="${tag_string}[$canon]"
done

# $entry holds literal \n sequences; they are expanded later via printf %b.
entry="- $tag_string $TITLE"
if [ -n "$WHY" ]; then
  entry="${entry}\n - Why: $WHY"
fi
if [ -n "$FILES" ]; then
  entry="${entry}\n - Key files:"
  oldIFS="$IFS"; IFS=','; set -- $FILES; IFS="$oldIFS"
  for f in "$@"; do
    ff="$(echo "$f" | awk '{$1=$1;print}')"
    [ -n "$ff" ] && entry="${entry}\n - \`$ff\`"
  done
fi

if [ ! -f "$JOURNAL" ]; then
  cat > "$JOURNAL" <<EOF2
# Development Journal - $PROJECT_NAME ($MONTH)

## $DATE

$(printf "%b" "$entry")
EOF2
  echo "Added journal entry to: $JOURNAL"
  exit 0
fi

if grep -q "^## $DATE\$" "$JOURNAL"; then
  # Append the entry at the end of the existing "## $DATE" section, i.e.
  # just BEFORE the next date heading (or at EOF when it is the last one).
  # NOTE: "in" is a reserved word in awk, so the state flag is "inside".
  awk -v d="$DATE" -v e="$(printf "%b" "$entry")" '
  BEGIN { inside=0; inserted=0 }
  {
    if (inside==1 && inserted==0 && $0 ~ /^## [0-9]{4}-[0-9]{2}-[0-9]{2}$/) {
      print ""
      print e
      print ""
      inserted=1
      inside=0
    }
    print $0
    if ($0 == "## " d) { inside=1 }
  }
  END {
    if (inserted==0) {
      print ""
      print e
      print ""
    }
  }
  ' "$JOURNAL" > "$JOURNAL.tmp.$$"
  mv "$JOURNAL.tmp.$$" "$JOURNAL"
else
  {
    printf "\n## %s\n\n" "$DATE"
    printf "%b\n" "$entry"
  } >> "$JOURNAL"
fi

echo "Added journal entry to: $JOURNAL"
EOF
|
|
923
|
+
|
|
924
|
+
# add-lesson.sh: creates the next-numbered lesson file (L-XXX-<slug>.md) with
# YAML frontmatter. Tags are validated/canonicalized via tag-vocabulary.md.
# Quoted heredoc: written verbatim at install time.
write_file "$MEM_SCRIPTS_DIR/add-lesson.sh" <<'EOF'
#!/bin/sh
set -eu

TITLE=""
TAGS=""
RULE=""
APPLIES_TO="*"

while [ $# -gt 0 ]; do
  case "$1" in
    --title) TITLE="$2"; shift 2;;
    --tags) TAGS="$2"; shift 2;;
    --rule) RULE="$2"; shift 2;;
    --applies-to) APPLIES_TO="$2"; shift 2;;
    -h|--help)
      echo "Usage: sh ./scripts/memory/add-lesson.sh --title \"...\" --tags \"Reliability,Data\" --rule \"...\" [--applies-to \"*\"]"
      exit 0;;
    *) echo "Unknown arg: $1" >&2; exit 2;;
  esac
done

if [ -z "$TITLE" ] || [ -z "$TAGS" ] || [ -z "$RULE" ]; then
  echo "Missing --title/--tags/--rule" >&2
  exit 2
fi

ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
LESSONS="$ROOT/.cursor/memory/lessons"
TAG_VOCAB="$ROOT/.cursor/memory/tag-vocabulary.md"
mkdir -p "$LESSONS"

# Find the highest existing lesson number.
max=0
for f in "$LESSONS"/L-*.md; do
  [ -e "$f" ] || continue
  b="$(basename "$f")"
  n="$(echo "$b" | sed -n 's/^L-\([0-9][0-9][0-9]\).*/\1/p')"
  # Strip leading zeros: a value like "008" would otherwise be rejected as
  # an invalid octal constant by the $(( )) arithmetic below in most shells.
  n="$(printf '%s' "$n" | sed 's/^0*//')"
  [ -n "$n" ] && [ "$n" -gt "$max" ] && max="$n"
done

next=$((max + 1))
ID="$(printf "L-%03d" "$next")"

kebab="$(echo "$TITLE" | tr '[:upper:]' '[:lower:]' | sed -E 's/[^a-z0-9]+/-/g; s/^-+//; s/-+$//;')"
[ -z "$kebab" ] && kebab="lesson"
file="$LESSONS/${ID}-${kebab}.md"

today="$(date +%Y-%m-%d)"

# Print the canonical casing of a tag from tag-vocabulary.md.
# Prints nothing when the tag is unknown; echoes the input unchanged when
# the vocabulary file does not exist.
canon_tag() {
  want_l="$(echo "$1" | tr '[:upper:]' '[:lower:]')"
  [ -f "$TAG_VOCAB" ] || { echo "$1"; return 0; }
  awk -v w="$want_l" '
  BEGIN { IGNORECASE=1 }
  /^\- \[[^]]+\]/ {
    t=$0
    sub(/^\- \[/,"",t); sub(/\].*$/,"",t)
    if (tolower(t)==w) { print t; exit }
  }
  ' "$TAG_VOCAB" 2>/dev/null || true
}

tags_out=""
oldIFS="$IFS"; IFS=','; set -- $TAGS; IFS="$oldIFS"
for t in "$@"; do
  tt="$(echo "$t" | awk '{$1=$1;print}')"
  [ -z "$tt" ] && continue
  canon="$(canon_tag "$tt")"
  if [ -z "$canon" ]; then
    echo "Unknown tag '$tt'. Add it to tag-vocabulary.md or fix the tag." >&2
    exit 1
  fi
  # Dedupe: normalize the ", "-joined list back to bare commas before the
  # containment check (otherwise duplicates past the first element slip
  # through), and match as a fixed string, not a regex.
  if printf '%s' ",$tags_out," | sed 's/, /,/g' | grep -qiF ",$canon,"; then
    continue
  fi
  if [ -z "$tags_out" ]; then tags_out="$canon"; else tags_out="$tags_out, $canon"; fi
done

tags_list="$tags_out"
if [ -z "$tags_list" ]; then
  echo "No valid tags provided." >&2
  exit 1
fi

cat > "$file" <<EOF2
---
id: $ID
title: $TITLE
status: Active
tags: [$tags_list]
introduced: $today
applies_to:
- $APPLIES_TO
triggers:
- TODO: add error messages or keywords
rule: $RULE
---

# $ID - $TITLE

## Symptom

TODO

## Root Cause

TODO

## Wrong Approach (DO NOT REPEAT)

- TODO

## Correct Approach

- TODO
EOF2

echo "Created lesson: $file"
echo "Next: run scripts/memory/rebuild-memory-index.sh"
EOF
|
|
1044
|
+
|
|
1045
|
+
# rebuild-memory-index.sh: regenerates lessons/index.md, lessons-index.json,
# journal-index.md/.json and the per-month digests from the markdown sources,
# then (optionally) rebuilds the SQLite FTS index and reports the size of the
# always-read layer. Quoted heredoc: written verbatim at install time.
write_file "$MEM_SCRIPTS_DIR/rebuild-memory-index.sh" <<'EOF'
#!/bin/sh
set -eu

ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
MEM="$ROOT/.cursor/memory"
LESSONS="$MEM/lessons"
JOURNAL="$MEM/journal"
DIGESTS="$MEM/digests"

mkdir -p "$LESSONS" "$JOURNAL" "$DIGESTS"

gen="$(date -u +%Y-%m-%dT%H:%M:%SZ)"

json_escape() {
  # prints JSON-safe string (no surrounding quotes)
  printf "%s" "$1" | sed -e 's/\\/\\\\/g' -e 's/\"/\\"/g' -e ':a;N;$!ba;s/\r//g;s/\n/\\n/g'
}

tmp_lessons="${TMPDIR:-/tmp}/mnemo-lessons.$$"
sorted_lessons="${TMPDIR:-/tmp}/mnemo-lessons.sorted.$$"
tmp_entries="${TMPDIR:-/tmp}/mnemo-journal-entries.$$"
sorted_entries="${TMPDIR:-/tmp}/mnemo-journal-entries.sorted.$$"
rm -f "$tmp_lessons" "$sorted_lessons" "$tmp_entries" "$sorted_entries"

# ---------------------------------
# Lessons -> index.md + lessons-index.json
# ---------------------------------

# num\tid\ttitle\tstatus\tintroduced\ttags_raw\tapplies_csv\trule\tfile
for f in "$LESSONS"/L-*.md; do
  [ -e "$f" ] || continue
  bn="$(basename "$f")"
  # Parse the YAML frontmatter into one tab-separated record.
  # NOTE: "in" is a reserved word in awk; the frontmatter flag is "fm".
  awk -v file="$bn" '
  function trim(s){ sub(/^[ \t]+/,"",s); sub(/[ \t]+$/,"",s); return s }
  BEGIN{ fm=0; cur=""; id=""; title=""; status=""; introduced=""; tags=""; rule=""; applies="" }
  NR==1 && $0=="---"{fm=1; next}
  fm==1 && $0=="---"{fm=0; next}
  fm==1{
    if ($0 ~ /^[ \t]*#/ || $0 ~ /^[ \t]*$/) next
    if ($0 ~ /^[A-Za-z0-9_]+:[ \t]*$/) {
      key=$1; sub(/:$/,"",key); cur=tolower(key); next
    }
    if ($0 ~ /^[ \t]*-[ \t]+/ && cur=="applies_to") {
      sub(/^[ \t]*-[ \t]+/,"",$0); v=trim($0)
      applies = applies (applies==""? v : "," v)
      next
    }
    if ($0 ~ /^[A-Za-z0-9_]+:[ \t]*/) {
      key=$1; sub(/:$/,"",key); k=tolower(key)
      $1=""; v=trim($0)
      if ((v ~ /^".*"$/) || (v ~ /^\047.*\047$/)) { v=substr(v,2,length(v)-2) }
      if (k=="id") id=v
      else if (k=="title") title=v
      else if (k=="status") status=v
      else if (k=="introduced") introduced=v
      else if (k=="rule") rule=v
      else if (k=="tags") tags=v
      cur=""
      next
    }
  }
  END{
    if (id=="") exit
    num=0
    if (id ~ /^L-[0-9]+$/) { idn=id; sub(/^L-/,"",idn); num=idn+0 }
    if (status=="") status="Active"
    if (title=="") title=file
    if (rule=="") rule=title
    print num "\t" id "\t" title "\t" status "\t" introduced "\t" tags "\t" applies "\t" rule "\t" file
  }
  ' "$f" >>"$tmp_lessons" 2>/dev/null || true
done

if [ -f "$tmp_lessons" ]; then
  sort -n -k1,1 "$tmp_lessons" >"$sorted_lessons" || true
else
  : >"$sorted_lessons"
fi

out_md="$LESSONS/index.md"
{
  echo "# Lessons Index (generated)"
  echo ""
  echo "Generated: $gen"
  echo ""
  echo "Format: ID | [Tags] | AppliesTo | Rule | File"
  echo ""
} >"$out_md"

lesson_count=0
while IFS="$(printf '\t')" read -r num id title status introduced tags applies rule file; do
  [ -z "$id" ] && continue
  lesson_count=$((lesson_count + 1))
  tagText="$(printf "%s" "$tags" | sed -n 's/^[[:space:]]*\[\(.*\)\][[:space:]]*$/\1/p' | sed 's/[[:space:]]*,[[:space:]]*/,/g' | awk -F, '{for(i=1;i<=NF;i++){if($i!=""){printf "[%s]",$i}}}')"
  appliesText="(any)"
  if [ -n "$applies" ]; then appliesText="$(printf "%s" "$applies" | sed 's/,/, /g')"; fi
  # Single-quoted format string: the backticks around %s must stay literal
  # (inside double quotes they would run as command substitution).
  printf '%s | %s | %s | %s | `%s`\n' "$id" "$tagText" "$appliesText" "$rule" "$file" >>"$out_md"
done <"$sorted_lessons"

if [ "$lesson_count" -eq 0 ]; then
  echo "(No lessons yet.)" >>"$out_md"
fi

out_json="$MEM/lessons-index.json"
{
  echo "["
  first=1
  while IFS="$(printf '\t')" read -r num id title status introduced tags applies rule file; do
    [ -z "$id" ] && continue
    tags_inner="$(printf "%s" "$tags" | sed -n 's/^[[:space:]]*\[\(.*\)\][[:space:]]*$/\1/p' | sed 's/[[:space:]]*,[[:space:]]*/,/g')"
    tags_json=""
    oldIFS="$IFS"; IFS=','; set -- $tags_inner; IFS="$oldIFS"
    for t in "$@"; do
      tt="$(printf "%s" "$t" | awk '{$1=$1;print}')"
      [ -z "$tt" ] && continue
      [ -n "$tags_json" ] && tags_json="$tags_json,"
      tags_json="$tags_json\"$(json_escape "$tt")\""
    done
    applies_json=""
    if [ -n "$applies" ]; then
      oldIFS="$IFS"; IFS=','; set -- $applies; IFS="$oldIFS"
      for a in "$@"; do
        aa="$(printf "%s" "$a" | awk '{$1=$1;print}')"
        [ -z "$aa" ] && continue
        [ -n "$applies_json" ] && applies_json="$applies_json,"
        applies_json="$applies_json\"$(json_escape "$aa")\""
      done
    fi
    if [ "$first" -eq 1 ]; then first=0; else echo ","; fi
    printf " {\"Id\":\"%s\",\"Num\":%s,\"Title\":\"%s\",\"Status\":\"%s\",\"Introduced\":\"%s\",\"Tags\":[%s],\"AppliesTo\":[%s],\"Rule\":\"%s\",\"File\":\"%s\"}" \
      "$(json_escape "$id")" \
      "${num:-0}" \
      "$(json_escape "$title")" \
      "$(json_escape "$status")" \
      "$(json_escape "$introduced")" \
      "$tags_json" \
      "$applies_json" \
      "$(json_escape "$rule")" \
      "$(json_escape "$file")"
  done <"$sorted_lessons"
  echo ""
  echo "]"
} >"$out_json"

# ---------------------------------
# Journal -> journal-index.md + journal-index.json + digests
# ---------------------------------

for jf in "$JOURNAL"/*.md; do
  [ -e "$jf" ] || continue
  base="$(basename "$jf")"
  [ "$base" = "README.md" ] && continue
  case "$base" in
    ????-??.md) ;;
    *) continue ;;
  esac

  # monthfile\tdate\ttags_csv\ttitle\tfiles_csv
  awk -v mf="$base" '
  function trim(s){ sub(/^[ \t]+/,"",s); sub(/[ \t]+$/,"",s); return s }
  function addfile(v){
    if (v=="") return
    # Heuristic: only collect backticked values that look like paths.
    # NOTE(review): the containment test below uses v as a dynamic regex;
    # values with regex metacharacters other than "." may mis-compare.
    if (v ~ /[\/\\]/ || v ~ /\.(cs|md|mdx|yml|yaml|csproj|ps1|sh|ts|tsx|json|py)$/) {
      if (files=="" || (","files"," !~ ","v",")) files = files (files==""? v : "," v)
    }
  }
  function flush(){
    if (inEntry==1 && date!="") {
      print mf "\t" date "\t" tags "\t" title "\t" files
    }
  }
  BEGIN{ date=""; inEntry=0; tags=""; title=""; files="" }
  /^##[ \t]+[0-9]{4}-[0-9]{2}-[0-9]{2}/{
    flush()
    inEntry=0
    tags=""; title=""; files=""
    date=$2
    next
  }
  /^-[ \t]+(\[[^]]+\])+/{
    flush()
    inEntry=1
    files=""; tags=""; title=""
    line=$0
    while (match(line, /\[[^]]+\]/)) {
      t=substr(line, RSTART+1, RLENGTH-2)
      tags = tags (tags==""? t : "," t)
      line = substr(line, RSTART+RLENGTH)
    }
    sub(/^[ \t]*-+[ \t]*/,"",$0)
    tline=$0
    gsub(/\[[^]]+\]/,"",tline)
    title=trim(tline)
    # collect backticks on same line
    line2=$0
    while (match(line2, /`[^`]+`/)) {
      v=substr(line2, RSTART+1, RLENGTH-2); addfile(v)
      line2=substr(line2, RSTART+RLENGTH)
    }
    next
  }
  inEntry==1{
    line=$0
    while (match(line, /`[^`]+`/)) {
      v=substr(line, RSTART+1, RLENGTH-2); addfile(v)
      line=substr(line, RSTART+RLENGTH)
    }
    next
  }
  END{ flush() }
  ' "$jf" >>"$tmp_entries" 2>/dev/null || true

  month="${base%.md}"
  digest="$DIGESTS/$month.digest.md"
  {
    echo "# Monthly Digest - $month (generated)"
    echo ""
    echo "Generated: $gen"
    echo ""
    echo "Token-cheap summary. See \`.cursor/memory/journal/$base\` for details."
    echo ""
  } >"$digest"

  awk '
  function trim(s){ sub(/^[ \t]+/,"",s); sub(/[ \t]+$/,"",s); return s }
  /^##[ \t]+[0-9]{4}-[0-9]{2}-[0-9]{2}/{
    d=$2
    print "## " d "\n"
    next
  }
  /^-[ \t]+(\[[^]]+\])+/{
    sub(/^[ \t]*-+[ \t]*/,"",$0)
    line=$0
    tags=""
    while (match(line, /\[[^]]+\]/)) {
      tags=tags substr(line, RSTART, RLENGTH)
      line=substr(line, RSTART+RLENGTH)
    }
    title=$0
    gsub(/\[[^]]+\]/,"",title)
    title=trim(title)
    print "- " tags " " title
    next
  }
  ' "$jf" >>"$digest"
done

if [ -f "$tmp_entries" ]; then
  sort -k2,2 -k4,4 "$tmp_entries" >"$sorted_entries" || true
else
  : >"$sorted_entries"
fi

ji="$MEM/journal-index.md"
{
  echo "# Journal Index (generated)"
  echo ""
  echo "Generated: $gen"
  echo ""
  echo "Format: YYYY-MM-DD | [Tags] | Title | Files"
  echo ""
} >"$ji"

while IFS="$(printf '\t')" read -r mf date tags title files; do
  [ -z "$date" ] && continue
  tagText=""
  oldIFS="$IFS"; IFS=','; set -- $tags; IFS="$oldIFS"
  for t in "$@"; do
    tt="$(printf "%s" "$t" | awk '{$1=$1;print}')"
    [ -n "$tt" ] && tagText="${tagText}[$tt]"
  done
  fileText="-"
  [ -n "$files" ] && fileText="$(printf "%s" "$files" | sed 's/,/, /g')"
  printf "%s | %s | %s | %s\n" "$date" "$tagText" "$title" "$fileText" >>"$ji"
done <"$sorted_entries"

out_jjson="$MEM/journal-index.json"
{
  echo "["
  first=1
  while IFS="$(printf '\t')" read -r mf date tags title files; do
    [ -z "$date" ] && continue
    tags_json=""
    oldIFS="$IFS"; IFS=','; set -- $tags; IFS="$oldIFS"
    for t in "$@"; do
      tt="$(printf "%s" "$t" | awk '{$1=$1;print}')"
      [ -z "$tt" ] && continue
      [ -n "$tags_json" ] && tags_json="$tags_json,"
      tags_json="$tags_json\"$(json_escape "$tt")\""
    done
    files_json=""
    if [ -n "$files" ]; then
      oldIFS="$IFS"; IFS=','; set -- $files; IFS="$oldIFS"
      for f in "$@"; do
        ff="$(printf "%s" "$f" | awk '{$1=$1;print}')"
        [ -z "$ff" ] && continue
        [ -n "$files_json" ] && files_json="$files_json,"
        files_json="$files_json\"$(json_escape "$ff")\""
      done
    fi
    if [ "$first" -eq 1 ]; then first=0; else echo ","; fi
    printf " {\"MonthFile\":\"%s\",\"Date\":\"%s\",\"Tags\":[%s],\"Title\":\"%s\",\"Files\":[%s]}" \
      "$(json_escape "$mf")" \
      "$(json_escape "$date")" \
      "$tags_json" \
      "$(json_escape "$title")" \
      "$files_json"
  done <"$sorted_entries"
  echo ""
  echo "]"
} >"$out_jjson"

# Optional: build SQLite index if python3 exists
if command -v python3 >/dev/null 2>&1 && [ -f "$ROOT/scripts/memory/build-memory-sqlite.py" ]; then
  echo "Python3 detected; building SQLite FTS index..."
  python3 "$ROOT/scripts/memory/build-memory-sqlite.py" --repo "$ROOT" || true
else
  echo "Python3 not found; skipping SQLite build."
fi

# Token usage monitoring (informational)
totalChars=0
for hf in "$MEM/hot-rules.md" "$MEM/active-context.md" "$MEM/memo.md"; do
  [ -f "$hf" ] || continue
  c="$(wc -c < "$hf" | awk '{$1=$1;print}')"
  totalChars=$((totalChars + c))
done
estimatedTokens=$((totalChars / 4))
echo ""
if [ "$totalChars" -gt 8000 ]; then
  echo "WARNING: Always-read layer is $totalChars chars (~$estimatedTokens tokens)"
else
  echo "Always-read layer: $totalChars chars (~$estimatedTokens tokens) - Healthy"
fi

rm -f "$tmp_lessons" "$sorted_lessons" "$tmp_entries" "$sorted_entries" 2>/dev/null || true
echo ""
echo "Rebuild complete."
EOF
|
|
1385
|
+
|
|
1386
|
+
# lint-memory.sh: validates lesson frontmatter, tag vocabulary usage,
# duplicate lesson IDs, duplicate journal date headings, and the token
# budget of the always-read files. Exits non-zero on any error.
# Quoted heredoc: written verbatim at install time.
write_file "$MEM_SCRIPTS_DIR/lint-memory.sh" <<'EOF'
#!/bin/sh
set -eu

ROOT="$(cd "$(dirname "$0")/../.." && pwd)"
MEM="$ROOT/.cursor/memory"
LESSONS="$MEM/lessons"
JOURNAL="$MEM/journal"
TAG_VOCAB="$MEM/tag-vocabulary.md"

hot="$MEM/hot-rules.md"
active="$MEM/active-context.md"
memo="$MEM/memo.md"

errors=0
warnings=0

err() { echo " ERROR: $1" >&2; errors=$((errors + 1)); }
warn() { echo " WARN: $1" >&2; warnings=$((warnings + 1)); }

echo "Linting Mnemo Memory System..."
echo ""

# Allowed tags
allowed_tmp="${TMPDIR:-/tmp}/mnemo-allowed-tags.$$"
rm -f "$allowed_tmp"
if [ -f "$TAG_VOCAB" ]; then
  awk '/^\- \[[^]]+\]/{t=$0; sub(/^\- \[/,"",t); sub(/\].*$/,"",t); print t}' "$TAG_VOCAB" >"$allowed_tmp"
else
  warn "Missing tag vocabulary: $TAG_VOCAB"
  : >"$allowed_tmp"
fi

echo "Checking lessons..."
ids_tmp="${TMPDIR:-/tmp}/mnemo-lesson-ids.$$"
dups_tmp="${TMPDIR:-/tmp}/mnemo-dups.$$"
rm -f "$ids_tmp" "$dups_tmp"

lesson_count=0
for lf in "$LESSONS"/L-*.md; do
  [ -e "$lf" ] || continue
  lesson_count=$((lesson_count + 1))
  bn="$(basename "$lf")"

  first="$(awk 'NR==1{print $0; exit}' "$lf" 2>/dev/null || true)"
  if [ "$first" != "---" ]; then
    err "[$bn] Missing YAML frontmatter"
    continue
  fi

  # Extract one frontmatter field per call.
  # NOTE: "in" is a reserved word in awk; the frontmatter flag is "fm".
  id="$(awk 'NR==1 && $0=="---"{fm=1;next} fm && $0=="---"{exit} fm && $1=="id:"{print $2; exit}' "$lf" 2>/dev/null || true)"
  title="$(awk 'NR==1 && $0=="---"{fm=1;next} fm && $0=="---"{exit} fm && $1=="title:"{$1=""; sub(/^ /,""); print; exit}' "$lf" 2>/dev/null || true)"
  status="$(awk 'NR==1 && $0=="---"{fm=1;next} fm && $0=="---"{exit} fm && $1=="status:"{$1=""; sub(/^ /,""); print; exit}' "$lf" 2>/dev/null || true)"
  tags="$(awk 'NR==1 && $0=="---"{fm=1;next} fm && $0=="---"{exit} fm && $1=="tags:"{$1=""; sub(/^ /,""); print; exit}' "$lf" 2>/dev/null || true)"
  introduced="$(awk 'NR==1 && $0=="---"{fm=1;next} fm && $0=="---"{exit} fm && $1=="introduced:"{print $2; exit}' "$lf" 2>/dev/null || true)"
  rule="$(awk 'NR==1 && $0=="---"{fm=1;next} fm && $0=="---"{exit} fm && $1=="rule:"{$1=""; sub(/^ /,""); print; exit}' "$lf" 2>/dev/null || true)"

  [ -z "$id" ] && err "[$bn] Missing required field: id"
  [ -z "$title" ] && err "[$bn] Missing required field: title"
  [ -z "$status" ] && err "[$bn] Missing required field: status"
  [ -z "$tags" ] && err "[$bn] Missing required field: tags"
  [ -z "$introduced" ] && err "[$bn] Missing required field: introduced"
  [ -z "$rule" ] && err "[$bn] Missing required field: rule"

  if [ -n "$id" ]; then
    # Tab-separated: the duplicate check below reads this with cut -f1 /
    # awk -F'\t', so the separator must actually be a tab.
    printf '%s\t%s\n' "$id" "$bn" >>"$ids_tmp"
    echo "$id" | grep -Eq '^L-[0-9]{3}$' || warn "[$bn] ID '$id' doesn't match format L-XXX (3 digits)"
    pref="$(echo "$id" | tr '[:upper:]' '[:lower:]')"
    echo "$bn" | tr '[:upper:]' '[:lower:]' | grep -q "^$pref" || warn "[$bn] Filename doesn't start with ID '$id'"
  fi

  if [ -s "$allowed_tmp" ] && [ -n "$tags" ]; then
    inner="$(printf "%s" "$tags" | sed -n 's/^[[:space:]]*\[\(.*\)\][[:space:]]*$/\1/p' | sed 's/[[:space:]]*,[[:space:]]*/,/g')"
    oldIFS="$IFS"; IFS=','; set -- $inner; IFS="$oldIFS"
    for t in "$@"; do
      tt="$(printf "%s" "$t" | awk '{$1=$1;print}')"
      [ -z "$tt" ] && continue
      if ! grep -Fxq "$tt" "$allowed_tmp"; then
        err "[$bn] Unknown tag [$tt]. Add it to tag-vocabulary.md or fix the lesson."
      fi
    done
  fi
done

echo " Found $lesson_count lesson files"

# Duplicate IDs. Read the duplicates from a temp file, not a pipeline:
# a piped while-loop runs in a subshell and would lose the err() counter.
if [ -f "$ids_tmp" ]; then
  cut -f1 "$ids_tmp" | sort | uniq -d >"$dups_tmp" || true
  while IFS= read -r did; do
    [ -z "$did" ] && continue
    files="$(awk -v i="$did" -F'\t' '$1==i{print $2}' "$ids_tmp" | paste -sd',' - | sed 's/,/, /g')"
    err "Duplicate lesson ID $did (files: $files)"
  done <"$dups_tmp"
fi

echo ""
echo "Checking journals..."
journal_count=0
for jf in "$JOURNAL"/????-??.md; do
  [ -e "$jf" ] || continue
  journal_count=$((journal_count + 1))
  bn="$(basename "$jf")"
  awk '/^##[ \t]+[0-9]{4}-[0-9]{2}-[0-9]{2}/{print $2}' "$jf" | sort | uniq -d >"$dups_tmp" || true
  while IFS= read -r d; do
    [ -z "$d" ] && continue
    c="$(awk -v dd="$d" '/^##[ \t]+[0-9]{4}-[0-9]{2}-[0-9]{2}/{if($2==dd) n++} END{print n+0}' "$jf")"
    err "[$bn] Duplicate date heading $d x$c. Merge into one section."
  done <"$dups_tmp"
done
echo " Found $journal_count journal files"

echo ""
echo "Checking token budget..."
total=0
for f in "$hot" "$active" "$memo"; do
  [ -f "$f" ] || continue
  c="$(wc -c < "$f" | awk '{$1=$1;print}')"
  total=$((total + c))
  if [ "$c" -gt 3000 ]; then
    warn "[$(basename "$f")] File is $c chars (~$((c/4)) tokens) - consider trimming"
  fi
done
echo " Always-read layer: $total chars (~$((total/4)) tokens)"
if [ "$total" -gt 8000 ]; then
  err "[Token Budget] Always-read layer exceeds 8000 chars (~2000 tokens)"
elif [ "$total" -gt 6000 ]; then
  warn "[Token Budget] Always-read layer is $total chars - approaching limit"
fi

echo ""
echo "Checking for orphans..."
[ -f "$LESSONS/index.md" ] || warn "[lessons/index.md] Missing - run rebuild-memory-index.sh"
[ -f "$MEM/journal-index.md" ] || warn "[journal-index.md] Missing - run rebuild-memory-index.sh"

echo ""
echo "====== LINT RESULTS ======"
echo "Errors: $errors"
echo "Warnings: $warnings"

rm -f "$allowed_tmp" "$ids_tmp" "$dups_tmp" 2>/dev/null || true

if [ "$errors" -gt 0 ]; then
  echo ""
  echo "Lint FAILED with $errors error(s)" >&2
  exit 1
fi
echo ""
echo "Lint passed"
EOF
|
|
1539
|
+
|
|
1540
|
+
# -------------------------
|
|
1541
|
+
# Git hook (portable)
|
|
1542
|
+
# -------------------------
|
|
1543
|
+
|
|
1544
|
+
write_file "$GITHOOKS_DIR/pre-commit" <<'EOF'
|
|
1545
|
+
#!/bin/sh
|
|
1546
|
+
set -e
|
|
1547
|
+
|
|
1548
|
+
ROOT="$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
|
|
1549
|
+
cd "$ROOT"
|
|
1550
|
+
|
|
1551
|
+
echo "[Mnemo] Rebuilding indexes + lint..."
|
|
1552
|
+
sh "./scripts/memory/rebuild-memory-index.sh"
|
|
1553
|
+
sh "./scripts/memory/lint-memory.sh"
|
|
1554
|
+
|
|
1555
|
+
for p in \
|
|
1556
|
+
.mnemo/memory/lessons/index.md \
|
|
1557
|
+
.mnemo/memory/lessons-index.json \
|
|
1558
|
+
.mnemo/memory/journal-index.md \
|
|
1559
|
+
.mnemo/memory/journal-index.json \
|
|
1560
|
+
.mnemo/memory/digests/*.digest.md \
|
|
1561
|
+
.cursor/memory/lessons/index.md \
|
|
1562
|
+
.cursor/memory/lessons-index.json \
|
|
1563
|
+
.cursor/memory/journal-index.md \
|
|
1564
|
+
.cursor/memory/journal-index.json \
|
|
1565
|
+
.cursor/memory/digests/*.digest.md
|
|
1566
|
+
do
|
|
1567
|
+
git add $p 2>/dev/null || true
|
|
1568
|
+
done
|
|
1569
|
+
exit 0
|
|
1570
|
+
EOF
|
|
1571
|
+
|
|
1572
|
+
# Also write .git/hooks/pre-commit for immediate effect (best effort)
|
|
1573
|
+
if [ -d "$REPO_ROOT/.git/hooks" ]; then
  legacy="$REPO_ROOT/.git/hooks/pre-commit"
  if [ -f "$legacy" ] && [ "$FORCE" != "1" ]; then
    if grep -q "Mnemo" "$legacy" 2>/dev/null; then
      # Hook already contains the Mnemo section; leave it alone.
      echo "SKIP (exists): $legacy"
    else
      # Append our hook to the user's existing pre-commit (best effort).
      printf "\n\n" >>"$legacy" || true
      cat "$GITHOOKS_DIR/pre-commit" >>"$legacy" || true
      echo "Updated: $legacy"
    fi
  else
    cp "$GITHOOKS_DIR/pre-commit" "$legacy" 2>/dev/null || true
  fi
  # Git silently ignores hooks that are not executable, so make sure the
  # bit is set regardless of which branch installed/updated the file.
  [ -f "$legacy" ] && chmod +x "$legacy" 2>/dev/null || true
fi
|
|
1587
|
+
|
|
1588
|
+
# Optional: write python helpers (for SQLite build/query). Used only if python3 exists.
|
|
1589
|
+
write_file "$MEM_SCRIPTS_DIR/build-memory-sqlite.py" <<'EOF'
|
|
1590
|
+
#!/usr/bin/env python3
|
|
1591
|
+
"""Build SQLite FTS5 index from memory JSON indexes."""
|
|
1592
|
+
import argparse
|
|
1593
|
+
import json
|
|
1594
|
+
import sqlite3
|
|
1595
|
+
from pathlib import Path
|
|
1596
|
+
|
|
1597
|
+
def read_text(p: Path) -> str:
    """Read a file as UTF-8, tolerating a BOM and undecodable bytes."""
    with p.open("r", encoding="utf-8-sig", errors="replace") as fh:
        return fh.read()
|
|
1599
|
+
|
|
1600
|
+
def _load_index(path: Path) -> list:
    """Load a JSON index file; tolerate missing/empty files and scalar roots."""
    if not path.exists():
        return []
    text = read_text(path).strip()
    if not text:
        return []
    data = json.loads(text)
    if isinstance(data, list):
        return data
    return [data] if data else []


def main():
    """Rebuild the FTS5 full-text index (memory.sqlite) from memory files.

    Indexes four kinds of rows: singleton docs (hot rules / active context /
    memo), lessons, journal entries, and digest files. The DB is rebuilt
    from scratch on every run. Returns 0 on success.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--repo", required=True)
    args = ap.parse_args()

    repo = Path(args.repo)
    mem = repo / ".cursor" / "memory"
    out_db = mem / "memory.sqlite"

    # Previously this loading logic was duplicated for both indexes;
    # factored into _load_index.
    lessons = _load_index(mem / "lessons-index.json")
    journal = _load_index(mem / "journal-index.json")

    # Full rebuild: drop any previous database.
    if out_db.exists():
        out_db.unlink()

    con = sqlite3.connect(str(out_db))
    cur = con.cursor()
    cur.execute("CREATE VIRTUAL TABLE memory_fts USING fts5(kind, id, date, tags, title, content, path);")

    insert_sql = "INSERT INTO memory_fts(kind,id,date,tags,title,content,path) VALUES (?,?,?,?,?,?,?)"

    # Singleton documents.
    for kind, fid, path in [
        ("hot_rules", "HOT", mem / "hot-rules.md"),
        ("active", "ACTIVE", mem / "active-context.md"),
        ("memo", "MEMO", mem / "memo.md"),
    ]:
        if path.exists():
            cur.execute(insert_sql, (kind, fid, None, "", path.name, read_text(path), str(path)))

    lessons_dir = mem / "lessons"
    for l in lessons:
        lid = l.get("Id")
        title = l.get("Title", "")
        tags = " ".join(l.get("Tags") or [])
        date = l.get("Introduced")
        file = l.get("File", "")
        path = lessons_dir / file if file else (mem / "lessons.md")
        # Prefer the full lesson file; fall back to index metadata only.
        content = read_text(path) if path.exists() else f"{title}\nRule: {l.get('Rule','')}"
        cur.execute(insert_sql, ("lesson", lid, date, tags, title, content, str(path)))

    for e in journal:
        tags = " ".join(e.get("Tags") or [])
        files = e.get("Files") or []
        if isinstance(files, dict):
            # Defensive: some writers emit a dict here; join() needs strings.
            files = []
        content = f"{e.get('Title','')}\nFiles: {', '.join(files)}"
        path = mem / "journal" / (e.get("MonthFile") or "")
        cur.execute(insert_sql, ("journal", None, e.get("Date"), tags, e.get("Title"), content, str(path)))

    digests = mem / "digests"
    if digests.exists():
        for p in digests.glob("*.digest.md"):
            cur.execute(insert_sql, ("digest", None, None, "", p.name, read_text(p), str(p)))

    con.commit()
    con.close()
    print(f"Built: {out_db}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
|
1687
|
+
EOF
|
|
1688
|
+
|
|
1689
|
+
write_file "$MEM_SCRIPTS_DIR/query-memory-sqlite.py" <<'EOF'
|
|
1690
|
+
#!/usr/bin/env python3
|
|
1691
|
+
"""Query memory SQLite FTS index."""
|
|
1692
|
+
import argparse
|
|
1693
|
+
import sqlite3
|
|
1694
|
+
from pathlib import Path
|
|
1695
|
+
|
|
1696
|
+
def main():
    """Query the prebuilt FTS index; prints matches in Human or AI format.

    Returns 0 on success (including "no matches"), 2 if the DB is missing.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--repo", required=True)
    ap.add_argument("--q", required=True)
    ap.add_argument("--area", default="All")
    ap.add_argument("--format", default="Human")
    args = ap.parse_args()

    repo = Path(args.repo)
    db = repo / ".cursor" / "memory" / "memory.sqlite"
    if not db.exists():
        print("SQLite DB not found. Run rebuild-memory-index.sh first.")
        return 2

    # Optional area -> row-kind narrowing; unknown areas search everything.
    kind_filter = {
        "hotrules": "hot_rules",
        "active": "active",
        "memo": "memo",
        "lessons": "lesson",
        "journal": "journal",
        "digests": "digest",
    }.get(args.area.lower())

    sql = "SELECT kind, id, date, title, path, snippet(memory_fts, 5, '[', ']', '...', 12) FROM memory_fts WHERE memory_fts MATCH ?"
    params = [args.q]
    if kind_filter:
        sql += " AND kind = ?"
        params.append(kind_filter)
    sql += " LIMIT 20"

    con = sqlite3.connect(str(db))
    cur = con.cursor()
    rows = cur.execute(sql, params).fetchall()
    con.close()

    if args.format.lower() == "ai":
        # AI format: an ordered, de-duplicated list of repo-relative paths.
        uniq = []
        for row in rows:
            raw = row[4]
            try:
                rel = str(Path(raw).resolve().relative_to(repo.resolve()))
            except Exception:
                rel = raw
            rel = rel.replace("\\", "/")
            if rel not in uniq:
                uniq.append(rel)
        if not uniq:
            print(f"No matches for: {args.q}")
        else:
            print("Files to read:")
            for rel in uniq:
                print(f" @{rel}")
        return 0

    if not rows:
        print(f"No matches for: {args.q}")
        return 0

    for kind, idv, date, title, path, snip in rows:
        print(f"==> {kind} | {idv or '-'} | {date or '-'} | {title}")
        print(f" {path}")
        print(f" {snip}")
        print("")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
|
1766
|
+
EOF
|
|
1767
|
+
|
|
1768
|
+
if [ "$ENABLE_VECTOR" = "1" ]; then
|
|
1769
|
+
echo "Vector mode enabled (provider: $VECTOR_PROVIDER)"
|
|
1770
|
+
|
|
1771
|
+
if [ "$DRY_RUN" != "1" ]; then
|
|
1772
|
+
# Robust Python detection: try python3.12, python3.11, python3.10, python3, python
|
|
1773
|
+
PYTHON3_CMD=""
|
|
1774
|
+
for _py_candidate in python3.12 python3.11 python3.10 python3 python; do
|
|
1775
|
+
if command -v "$_py_candidate" >/dev/null 2>&1; then
|
|
1776
|
+
_ver="$($_py_candidate -c 'import sys; print(f"{sys.version_info.major}.{sys.version_info.minor}")' 2>/dev/null || true)"
|
|
1777
|
+
_major="$(echo "$_ver" | cut -d. -f1)"
|
|
1778
|
+
_minor="$(echo "$_ver" | cut -d. -f2)"
|
|
1779
|
+
if [ "${_major:-0}" -ge 3 ] && [ "${_minor:-0}" -ge 10 ] 2>/dev/null; then
|
|
1780
|
+
PYTHON3_CMD="$_py_candidate"
|
|
1781
|
+
break
|
|
1782
|
+
fi
|
|
1783
|
+
fi
|
|
1784
|
+
done
|
|
1785
|
+
if [ -z "$PYTHON3_CMD" ]; then
|
|
1786
|
+
echo "Vector mode requires Python 3.10+ (python3/python not found or version too old)." >&2
|
|
1787
|
+
echo "Install Homebrew Python: brew install python@3.12" >&2
|
|
1788
|
+
exit 1
|
|
1789
|
+
fi
|
|
1790
|
+
|
|
1791
|
+
# Confirm pip works for the chosen interpreter before attempting installs.
"$PYTHON3_CMD" -m pip --version >/dev/null 2>&1 || {
  echo "pip is unavailable for $PYTHON3_CMD." >&2
  echo "Install Homebrew Python (brew install python) or use a virtualenv." >&2
  exit 1
}
|
|
1796
|
+
|
|
1797
|
+
# Probe for the required Python modules before reaching for pip.
# The heredoc script exits 0 only when every needed module is importable;
# its exit status drives need_pip_install. --force always reinstalls.
need_pip_install="1"
if [ "$FORCE" != "1" ] && "$PYTHON3_CMD" - "$VECTOR_PROVIDER" <<'PY' >/dev/null 2>&1; then
import importlib.util
import sys

provider = sys.argv[1]
mods = ["openai", "sqlite_vec", "mcp"]
if provider == "gemini":
    mods.append("google.genai")
missing = [m for m in mods if importlib.util.find_spec(m) is None]
raise SystemExit(0 if not missing else 1)
PY
need_pip_install="0"
fi
|
|
1811
|
+
|
|
1812
|
+
# Install the vector dependencies when the probe above found any missing.
if [ "$need_pip_install" = "1" ]; then
  # Capture pip's stderr so we can detect PEP668 refusals and still show
  # the full error to the user.
  pip_err="${TMPDIR:-/tmp}/mnemo-vector-pip.$$"
  pkgs="openai sqlite-vec mcp[cli]>=1.2.0,<2.0"
  if [ "$VECTOR_PROVIDER" = "gemini" ]; then
    pkgs="$pkgs google-genai"
  fi

  # shellcheck disable=SC2086
  if ! "$PYTHON3_CMD" -m pip install --quiet $pkgs 2>"$pip_err"; then
    # Homebrew/system Pythons often refuse pip installs (PEP668); give a
    # targeted hint before dumping pip's own output.
    if grep -Ei "externally managed|externally-managed" "$pip_err" >/dev/null 2>&1; then
      echo "Python is externally managed (PEP668)." >&2
      echo "Use Homebrew Python or a venv, then re-run with --enable-vector." >&2
    fi
    cat "$pip_err" >&2 || true
    rm -f "$pip_err"
    exit 1
  fi
  rm -f "$pip_err"
else
  echo "SKIP (deps installed): vector dependency install"
fi
|
|
1833
|
+
else
|
|
1834
|
+
echo "[DRY RUN] Skipping vector dependency checks/install."
|
|
1835
|
+
fi
|
|
1836
|
+
|
|
1837
|
+
# Use template file if running from Mnemo installer repo; else use embedded copy
# (the matching `else` branch with the embedded heredoc, and the closing `fi`,
# follow below).
_vector_tpl="$_INSTALLER_DIR/scripts/memory/installer/templates/mnemo_vector.py"
if [ -f "$_vector_tpl" ]; then
  if [ "$DRY_RUN" = "1" ]; then
    echo "[DRY RUN] WOULD WRITE: $MEM_SCRIPTS_DIR/mnemo_vector.py"
  elif [ -f "$MEM_SCRIPTS_DIR/mnemo_vector.py" ] && [ "$FORCE" != "1" ]; then
    # Respect an existing engine script unless --force is given.
    echo "SKIP (exists): $MEM_SCRIPTS_DIR/mnemo_vector.py"
  else
    cp "$_vector_tpl" "$MEM_SCRIPTS_DIR/mnemo_vector.py"
    echo "WROTE: $MEM_SCRIPTS_DIR/mnemo_vector.py"
  fi
|
|
1848
|
+
else
|
|
1849
|
+
write_file "$MEM_SCRIPTS_DIR/mnemo_vector.py" <<'EOF'
|
|
1850
|
+
#!/usr/bin/env python3
"""
Mnemo vector memory engine (v2 - embedded fallback).
Optional semantic layer for .cursor/memory with MCP tools.
"""
import os
import re
import sqlite3
import hashlib
from pathlib import Path

import sqlite_vec
try:
    from sqlite_vec import serialize_float32 as serialize_f32
except ImportError:
    from sqlite_vec import serialize_f32  # backwards compatibility
from mcp.server.fastmcp import FastMCP

# Bumping SCHEMA_VERSION forces init_db() to drop and recreate the tables.
SCHEMA_VERSION = 2
# Embedding width; shared by the vec0 table DDL and both providers.
EMBED_DIM = 1536
MEM_ROOT = Path(".cursor/memory")
DB_PATH = MEM_ROOT / "mnemo_vector.sqlite"
PROVIDER = os.getenv("MNEMO_PROVIDER", "openai").lower()

# Files and directories excluded from indexing (generated indexes, archives).
SKIP_NAMES = {
    "README.md",
    "index.md",
    "lessons-index.json",
    "journal-index.json",
    "journal-index.md",
}
SKIP_DIRS = {"legacy", "templates"}
# Per-chunk character cap sent to the embedding API.
MAX_EMBED_CHARS = 12000
# Smaller batches for gemini — presumably to respect its request limits; TODO confirm.
BATCH_SIZE = 16 if PROVIDER == "gemini" else 64
# Lazily constructed provider client (see _get_embed_client).
_EMBED_CLIENT = None

mcp = FastMCP("MnemoVector")
|
|
1887
|
+
|
|
1888
|
+
|
|
1889
|
+
def _trim_for_embedding(text: str) -> str:
    """Cap text at MAX_EMBED_CHARS before it is sent to the embedding API."""
    if len(text) > MAX_EMBED_CHARS:
        return text[:MAX_EMBED_CHARS]
    return text
|
|
1891
|
+
|
|
1892
|
+
|
|
1893
|
+
def _get_embed_client():
    """Return a cached provider client (gemini or openai).

    Raises RuntimeError when the provider's API key env var is missing.
    SDK imports are deferred so a missing package only fails when used.
    """
    global _EMBED_CLIENT
    if _EMBED_CLIENT is not None:
        return _EMBED_CLIENT

    if PROVIDER == "gemini":
        key = os.getenv("GEMINI_API_KEY")
        if not key:
            raise RuntimeError("GEMINI_API_KEY is not set")
        from google import genai
        _EMBED_CLIENT = genai.Client(api_key=key)
        return _EMBED_CLIENT

    # Default provider: OpenAI.
    key = os.getenv("OPENAI_API_KEY")
    if not key:
        raise RuntimeError("OPENAI_API_KEY is not set")
    from openai import OpenAI
    _EMBED_CLIENT = OpenAI(api_key=key)
    return _EMBED_CLIENT
|
|
1912
|
+
|
|
1913
|
+
|
|
1914
|
+
def get_embeddings(texts: list[str]) -> list[list[float]]:
    """Embed a batch of strings via the configured provider.

    Inputs are trimmed to MAX_EMBED_CHARS first. Raises RuntimeError if the
    provider returns a vector count that does not match the input count.
    """
    if not texts:
        return []
    trimmed = [_trim_for_embedding(t) for t in texts]
    client = _get_embed_client()

    if PROVIDER == "gemini":
        from google.genai import types
        result = client.models.embed_content(
            model="gemini-embedding-001",
            contents=trimmed,
            # Request EMBED_DIM-wide vectors to match the vec0 table schema.
            config=types.EmbedContentConfig(output_dimensionality=EMBED_DIM),
        )
        vectors = [emb.values for emb in result.embeddings]
    else:
        resp = client.embeddings.create(input=trimmed, model="text-embedding-3-small")
        vectors = [item.embedding for item in resp.data]

    # Guard against silent partial responses from the provider.
    if len(vectors) != len(trimmed):
        raise RuntimeError(f"Embedding provider returned {len(vectors)} vectors for {len(trimmed)} inputs")
    return vectors
|
|
1935
|
+
|
|
1936
|
+
|
|
1937
|
+
def get_embedding(text: str) -> list[float]:
    """Embed a single string by delegating to the batch helper."""
    vectors = get_embeddings([text])
    return vectors[0]
|
|
1939
|
+
|
|
1940
|
+
|
|
1941
|
+
def get_db() -> sqlite3.Connection:
    """Open the vector DB with the sqlite-vec extension loaded.

    WAL mode plus a generous busy timeout so concurrent invocations (e.g.
    hook + MCP server) don't immediately fail on lock contention.
    """
    DB_PATH.parent.mkdir(parents=True, exist_ok=True)
    db = sqlite3.connect(str(DB_PATH), timeout=30)
    db.execute("PRAGMA journal_mode=WAL")
    db.execute("PRAGMA busy_timeout=10000")
    # Extension loading must be enabled before sqlite_vec can register vec0.
    db.enable_load_extension(True)
    sqlite_vec.load(db)
    return db
|
|
1949
|
+
|
|
1950
|
+
|
|
1951
|
+
def init_db() -> sqlite3.Connection:
    """Open the DB and migrate the schema if it is older than SCHEMA_VERSION.

    Migration is destructive: both tables are dropped and recreated, so a
    version bump forces a full re-embedding on the next sync.
    """
    db = get_db()
    db.execute("CREATE TABLE IF NOT EXISTS schema_info (key TEXT PRIMARY KEY, value TEXT)")
    row = db.execute("SELECT value FROM schema_info WHERE key='version'").fetchone()
    ver = int(row[0]) if row else 0

    if ver < SCHEMA_VERSION:
        db.execute("DROP TABLE IF EXISTS file_meta")
        db.execute("DROP TABLE IF EXISTS vec_memory")
        # Tracks one row per indexed source file; hash drives change detection.
        db.execute(
            """
            CREATE TABLE file_meta (
                path TEXT PRIMARY KEY,
                hash TEXT NOT NULL,
                chunk_count INTEGER DEFAULT 0,
                updated_at REAL DEFAULT (unixepoch('now'))
            )
            """
        )
        # sqlite-vec vec0 virtual table; '+' columns are auxiliary (stored,
        # not part of the vector index).
        db.execute(
            f"""
            CREATE VIRTUAL TABLE vec_memory USING vec0(
                embedding float[{EMBED_DIM}] distance_metric=cosine,
                +ref_path TEXT,
                +content TEXT,
                +source_file TEXT
            )
            """
        )
        db.execute(
            "INSERT OR REPLACE INTO schema_info(key, value) VALUES ('version', ?)",
            (str(SCHEMA_VERSION),),
        )
        db.commit()
    return db
|
|
1986
|
+
|
|
1987
|
+
|
|
1988
|
+
def chunk_markdown(content: str, file_path: Path) -> list[tuple[str, str]]:
    """Split a memory markdown file into (chunk_text, ref) pairs for embedding.

    Refs look like "@path# anchor" so search hits can be opened directly.
    Journal files are split per dated entry, lesson files are embedded whole,
    and everything else is split on markdown headings (levels 1-4).
    """
    chunks: list[tuple[str, str]] = []
    path_str = str(file_path).replace("\\", "/")

    # Journal files: one chunk per "## YYYY-MM-DD" entry.
    if "journal/" in path_str.lower():
        parts = re.split(r"^(##\s+\d{4}-\d{2}-\d{2})", content, flags=re.MULTILINE)
        preamble = parts[0].strip()
        if preamble:
            chunks.append((preamble, f"@{path_str}"))
        i = 1
        while i < len(parts) - 1:
            heading = parts[i].strip()
            body = parts[i + 1].strip()
            date = heading.replace("##", "").strip()
            chunks.append((f"{heading}\n{body}".strip(), f"@{path_str}# {date}"))
            i += 2
        if chunks:
            return chunks
        # No dated entries found: fall through to the generic heading split.

    # Lesson files (lessons/L-*.md): embed the whole file as one chunk.
    if file_path.parent.name == "lessons" and file_path.name.startswith("L-"):
        text = content.strip()
        if text:
            # \d{3,} (not \d{3}): the old pattern prefix-matched longer ids,
            # truncating e.g. "L-1234" to anchor "L-123".
            m = re.match(r"(L-\d{3,})", file_path.name)
            ref = f"@{path_str}# {m.group(1)}" if m else f"@{path_str}"
            chunks.append((text, ref))
        return chunks

    # Generic markdown: one chunk per heading section.
    parts = re.split(r"^(#{1,4}\s+.+)$", content, flags=re.MULTILINE)
    preamble = parts[0].strip()
    if preamble:
        chunks.append((preamble, f"@{path_str}"))

    i = 1
    while i < len(parts) - 1:
        heading_line = parts[i].strip()
        body = parts[i + 1].strip()
        heading_text = re.sub(r"^#{1,4}\s+", "", heading_line)
        full = f"{heading_line}\n{body}".strip() if body else heading_line
        if full.strip():
            chunks.append((full, f"@{path_str}# {heading_text}"))
        i += 2

    # Heading-less files: embed the whole content as a single chunk.
    if not chunks and content.strip():
        chunks.append((content.strip(), f"@{path_str}"))
    return chunks
|
|
2033
|
+
|
|
2034
|
+
|
|
2035
|
+
@mcp.tool()
def vector_sync() -> str:
    """Incrementally (re-)embed all memory markdown files into the vector DB.

    Unchanged files (same SHA-256) are skipped; files deleted on disk are
    purged from the index. Files whose chunks partially failed to embed are
    marked with hash 'DIRTY' so the next sync retries them.
    """
    db = init_db()
    # Collect candidate files, honoring the skip lists.
    files: dict[str, Path] = {}
    for p in MEM_ROOT.glob("**/*.md"):
        if p.name in SKIP_NAMES:
            continue
        if any(skip in p.parts for skip in SKIP_DIRS):
            continue
        files[str(p)] = p

    updated = 0
    skipped = 0
    errors = 0
    # Purge index entries whose source file no longer exists.
    known = db.execute("SELECT path FROM file_meta").fetchall()
    for (stored,) in known:
        if stored not in files:
            db.execute("DELETE FROM vec_memory WHERE source_file = ?", (stored,))
            db.execute("DELETE FROM file_meta WHERE path = ?", (stored,))
            updated += 1

    for str_path, file_path in files.items():
        try:
            content = file_path.read_text(encoding="utf-8-sig")
        except (UnicodeDecodeError, PermissionError, OSError):
            errors += 1
            continue
        if not content.strip():
            skipped += 1
            continue

        # Content hash drives change detection against file_meta.
        f_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()
        row = db.execute("SELECT hash FROM file_meta WHERE path = ?", (str_path,)).fetchone()
        if row and row[0] == f_hash:
            skipped += 1
            continue

        # File changed: drop its old chunks, then re-embed in batches.
        db.execute("DELETE FROM vec_memory WHERE source_file = ?", (str_path,))
        chunks = chunk_markdown(content, file_path)
        embedded = 0
        chunk_errors = 0
        for i in range(0, len(chunks), BATCH_SIZE):
            batch = chunks[i : i + BATCH_SIZE]
            texts = [text for text, _ in batch]
            try:
                vectors = get_embeddings(texts)
                for (text, ref), emb in zip(batch, vectors):
                    db.execute(
                        "INSERT INTO vec_memory(embedding, ref_path, content, source_file) VALUES (?, ?, ?, ?)",
                        (serialize_f32(emb), ref, text, str_path),
                    )
                    embedded += 1
            except Exception:
                # Batch call failed: fall back to one-by-one so a single bad
                # chunk doesn't lose the whole batch.
                for text, ref in batch:
                    try:
                        emb = get_embedding(text)
                        db.execute(
                            "INSERT INTO vec_memory(embedding, ref_path, content, source_file) VALUES (?, ?, ?, ?)",
                            (serialize_f32(emb), ref, text, str_path),
                        )
                        embedded += 1
                    except Exception:
                        chunk_errors += 1

        if chunk_errors == 0:
            db.execute(
                "INSERT OR REPLACE INTO file_meta(path, hash, chunk_count, updated_at) VALUES (?, ?, ?, unixepoch('now'))",
                (str_path, f_hash, embedded),
            )
        else:
            # 'DIRTY' never matches a real hash, forcing a retry next sync.
            db.execute(
                "INSERT OR REPLACE INTO file_meta(path, hash, chunk_count, updated_at) VALUES (?, ?, ?, unixepoch('now'))",
                (str_path, "DIRTY", embedded),
            )
        errors += chunk_errors
        updated += 1

    db.commit()
    db.close()
    msg = f"Synced: {updated} files processed, {skipped} unchanged"
    if errors:
        msg += f", {errors} chunk errors (will retry)"
    return msg
|
|
2118
|
+
|
|
2119
|
+
|
|
2120
|
+
@mcp.tool()
def vector_search(query: str, top_k: int = 5) -> str:
    """Semantic KNN search over the vector DB; returns formatted matches."""
    db = init_db()
    emb = get_embedding(query)
    # "embedding MATCH ? AND k = ?" is sqlite-vec's vec0 KNN query form.
    rows = db.execute(
        "SELECT ref_path, content, distance FROM vec_memory WHERE embedding MATCH ? AND k = ? ORDER BY distance",
        (serialize_f32(emb), top_k),
    ).fetchall()
    db.close()
    if not rows:
        return "No relevant memory found."
    out = []
    for ref, content, dist in rows:
        # Cosine distance -> similarity.
        sim = round(1.0 - dist, 4)
        # Collapse whitespace in a short preview of the chunk.
        preview = " ".join(content[:400].split())
        out.append(f"[sim={sim:.3f}] {ref}\n{preview}")
    return "\n\n---\n\n".join(out)
|
|
2137
|
+
|
|
2138
|
+
|
|
2139
|
+
@mcp.tool()
def vector_forget(path_pattern: str = "") -> str:
    """Remove indexed entries by path substring, or prune deleted files.

    With a pattern: delete all chunks and file records whose path contains
    it. Without: drop records whose source file no longer exists on disk.
    """
    db = init_db()
    removed = 0
    if path_pattern:
        like = f"%{path_pattern}%"
        r1 = db.execute("DELETE FROM vec_memory WHERE source_file LIKE ?", (like,)).rowcount
        r2 = db.execute("DELETE FROM file_meta WHERE path LIKE ?", (like,)).rowcount
        # NOTE(review): r1 counts chunks while r2 counts files, so max() mixes
        # units — the reported "entries" figure is approximate.
        removed = max(r1, r2)
    else:
        known = db.execute("SELECT path FROM file_meta").fetchall()
        for (p,) in known:
            if not Path(p).exists():
                db.execute("DELETE FROM vec_memory WHERE source_file = ?", (p,))
                db.execute("DELETE FROM file_meta WHERE path = ?", (p,))
                removed += 1
    db.commit()
    db.close()
    return f"Pruned {removed} entries."
|
|
2158
|
+
|
|
2159
|
+
|
|
2160
|
+
@mcp.tool()
def vector_health() -> str:
    """Report schema version, row counts, dirty files, and DB integrity."""
    lines = []
    db = init_db()
    ver = db.execute("SELECT value FROM schema_info WHERE key='version'").fetchone()
    lines.append(f"Schema: v{ver[0] if ver else '?'}")
    files = db.execute("SELECT COUNT(*) FROM file_meta").fetchone()[0]
    vecs = db.execute("SELECT COUNT(*) FROM vec_memory").fetchone()[0]
    # 'DIRTY' marks files whose embedding partially failed (see vector_sync).
    dirty = db.execute("SELECT COUNT(*) FROM file_meta WHERE hash = 'DIRTY'").fetchone()[0]
    lines.append(f"Files tracked: {files}")
    lines.append(f"Vector chunks: {vecs}")
    if dirty:
        lines.append(f"Dirty files: {dirty}")
    lines.append(f"DB integrity: {db.execute('PRAGMA integrity_check').fetchone()[0]}")
    db.close()
    return "\n".join(lines)
|
|
2176
|
+
|
|
2177
|
+
|
|
2178
|
+
if __name__ == "__main__":
|
|
2179
|
+
mcp.run()
|
|
2180
|
+
EOF
|
|
2181
|
+
fi # end: template file check for mnemo_vector.py
|
|
2182
|
+
|
|
2183
|
+
write_file "$RULES_DIR/01-vector-search.mdc" <<'EOF'
|
|
2184
|
+
---
|
|
2185
|
+
description: Mnemo vector semantic retrieval layer (optional)
|
|
2186
|
+
globs:
|
|
2187
|
+
- "**/*"
|
|
2188
|
+
alwaysApply: true
|
|
2189
|
+
---
|
|
2190
|
+
|
|
2191
|
+
# Vector Memory Layer (Optional)
|
|
2192
|
+
|
|
2193
|
+
This rule supplements `00-memory-system.mdc` and does not replace governance.
|
|
2194
|
+
|
|
2195
|
+
## Use vector tools when:
|
|
2196
|
+
- You do not know the exact keyword for prior context.
|
|
2197
|
+
- Keyword/FTS search did not find relevant history.
|
|
2198
|
+
|
|
2199
|
+
## MCP tools
|
|
2200
|
+
- `vector_search` - semantic retrieval with cosine similarity.
|
|
2201
|
+
- `vector_sync` - incremental indexing.
|
|
2202
|
+
- `vector_forget` - remove stale entries.
|
|
2203
|
+
- `vector_health` - DB/API health check.
|
|
2204
|
+
|
|
2205
|
+
## Fallback
|
|
2206
|
+
If vector search is unavailable, keep using:
|
|
2207
|
+
- `scripts/memory/query-memory.sh --query "..."`
|
|
2208
|
+
- `scripts/memory/query-memory.sh --query "..." --use-sqlite`
|
|
2209
|
+
EOF
|
|
2210
|
+
|
|
2211
|
+
# Vector MCP + git-hook wiring (skipped entirely in dry-run mode).
if [ "$DRY_RUN" != "1" ]; then
  # Merge a "MnemoVector" server entry into the Cursor MCP config at
  # $MNEMO_CURSOR_MCP_PATH. The embedded Python prints UNCHANGED when the config
  # already matches (and --force is not set), or UPDATED after an atomic
  # tmp-file + rename write with a .bak backup.
  # NOTE: the <<'PY' delimiter is quoted, so "${env:GEMINI_API_KEY}" below is
  # passed through literally — it is a Cursor-side placeholder, not a shell
  # expansion.
  mcp_status="$(python3 - "$REPO_ROOT" "$VECTOR_PROVIDER" "$FORCE" "$MNEMO_CURSOR_MCP_PATH" <<'PY'
import json
import sys
from pathlib import Path

# argv: repo root, embedding provider, force flag ("1"/other), MCP config path.
repo = Path(sys.argv[1])
provider = sys.argv[2]
force = sys.argv[3] == "1"
mcp_path = Path(sys.argv[4])
engine = str((repo / "scripts" / "memory" / "mnemo_vector.py").resolve())

# Load the existing config if present; fall back to an empty document when the
# file is unreadable or not valid JSON.
root = {}
existing_root = None
if mcp_path.exists():
    try:
        existing_root = json.loads(mcp_path.read_text(encoding="utf-8"))
        root = dict(existing_root) if isinstance(existing_root, dict) else {}
    except Exception:
        root = {}
        existing_root = None

servers = root.get("mcpServers") if isinstance(root, dict) else {}
if not isinstance(servers, dict):
    servers = {}

# Provider-specific API key reference (literal Cursor env placeholder).
env = {"MNEMO_PROVIDER": provider}
if provider == "gemini":
    env["GEMINI_API_KEY"] = "${env:GEMINI_API_KEY}"
else:
    env["OPENAI_API_KEY"] = "${env:OPENAI_API_KEY}"

servers["MnemoVector"] = {
    "command": "python3",
    "args": [engine],
    "env": env,
}
root["mcpServers"] = servers
# Idempotency: leave the file alone when nothing would change (unless forced).
if (not force) and isinstance(existing_root, dict) and existing_root == root:
    print("UNCHANGED")
    raise SystemExit(0)
new_content = json.dumps(root, indent=2)
if mcp_path.exists():
    import shutil
    shutil.copy2(str(mcp_path), str(mcp_path) + ".bak")
# Atomic write: tmp file in the same directory, then rename over the target.
tmp = str(mcp_path) + ".tmp"
Path(tmp).write_text(new_content, encoding="utf-8")
Path(tmp).replace(mcp_path)
print("UPDATED")
PY
)"
  if [ "$mcp_status" = "UNCHANGED" ]; then
    echo "SKIP (exists): $MNEMO_CURSOR_MCP_PATH (MnemoVector MCP unchanged)"
  else
    echo "WROTE: $MNEMO_CURSOR_MCP_PATH"
  fi

  # Install the post-commit wrapper hook. A pre-existing, non-Mnemo hook
  # (no marker string inside) is backed up first so the wrapper can chain to it.
  post_hook="$GITHOOKS_DIR/post-commit"
  backup_hook="$GITHOOKS_DIR/post-commit.before-mnemo-vector"
  marker="Mnemo Vector Hook Wrapper"
  if [ -f "$post_hook" ] && ! grep -Fq "$marker" "$post_hook" 2>/dev/null; then
    cp "$post_hook" "$backup_hook" 2>/dev/null || true
  fi

  # Guard line baked into the generated hook: bail out silently when the
  # provider's API key is not exported in the committing shell.
  if [ "$VECTOR_PROVIDER" = "gemini" ]; then
    api_guard='[ -z "${GEMINI_API_KEY:-}" ] && exit 0'
  else
    api_guard='[ -z "${OPENAI_API_KEY:-}" ] && exit 0'
  fi

  # Generate the hook. The EOF delimiter is unquoted so $api_guard expands
  # now, while \$-escaped variables are evaluated when the hook itself runs.
  post_tmp="${TMPDIR:-/tmp}/mnemo-post-hook.$$"
  cat >"$post_tmp" <<EOF
#!/bin/sh
# Mnemo Vector Hook Wrapper
set -e

ROOT="\$(git rev-parse --show-toplevel 2>/dev/null || pwd)"
cd "\$ROOT" || exit 0

if [ -f ".githooks/post-commit.before-mnemo-vector" ]; then
sh ".githooks/post-commit.before-mnemo-vector" || true
fi

$api_guard

LOCKDIR="\$ROOT/.mnemo/memory/.sync.lock"
if [ ! -d "\$ROOT/.mnemo/memory" ] && [ -d "\$ROOT/.cursor/memory" ]; then
LOCKDIR="\$ROOT/.cursor/memory/.sync.lock"
fi
if [ -d "\$LOCKDIR" ]; then
NOW=\$(date +%s 2>/dev/null || echo 0)
MTIME=\$(stat -f %m "\$LOCKDIR" 2>/dev/null || stat -c %Y "\$LOCKDIR" 2>/dev/null || echo 0)
AGE=\$((NOW - MTIME))
if [ "\$AGE" -gt 600 ] 2>/dev/null; then
rmdir "\$LOCKDIR" 2>/dev/null || true
fi
fi

if mkdir "\$LOCKDIR" 2>/dev/null; then
trap 'rmdir "\$LOCKDIR" 2>/dev/null || true' EXIT INT TERM
python3 -c "import sys; sys.path.insert(0, 'scripts/memory'); from mnemo_vector import vector_sync; print('[MnemoVector]', vector_sync())" 2>&1 | tail -1 || true
fi

exit 0
EOF
  # Refresh .githooks/post-commit only when the generated content differs from
  # what is already installed (or when --force is set).
  if [ -f "$post_hook" ] && [ "$FORCE" != "1" ] && cmp -s "$post_hook" "$post_tmp" 2>/dev/null; then
    echo "SKIP (exists): $post_hook"
  else
    cp "$post_tmp" "$post_hook" 2>/dev/null || cat "$post_tmp" >"$post_hook"
    chmod +x "$post_hook" 2>/dev/null || true
    echo "WROTE: $post_hook"
  fi
  rm -f "$post_tmp"

  # Mirror the hook into the legacy .git/hooks dir for repos not using
  # core.hooksPath, but never clobber an unrelated user hook without --force.
  if [ -d "$REPO_ROOT/.git/hooks" ]; then
    legacy_post="$REPO_ROOT/.git/hooks/post-commit"
    if [ -f "$legacy_post" ] && [ "$FORCE" != "1" ] && ! grep -Fq "$marker" "$legacy_post" 2>/dev/null; then
      echo "SKIP (legacy post-commit exists): $legacy_post"
    elif [ -f "$legacy_post" ] && [ "$FORCE" != "1" ] && cmp -s "$post_hook" "$legacy_post" 2>/dev/null; then
      echo "SKIP (exists): $legacy_post"
    else
      cp "$post_hook" "$legacy_post" 2>/dev/null || true
      echo "WROTE: $legacy_post"
    fi
  fi
else
  echo "[DRY RUN] WOULD WRITE: $MNEMO_CURSOR_MCP_PATH"
  echo "[DRY RUN] WOULD CONFIGURE: $GITHOOKS_DIR/post-commit (MnemoVector wrapper)"
fi
|
|
2340
|
+
fi
|
|
2341
|
+
|
|
2342
|
+
# Update .gitignore with memory artifacts (marker-based, idempotent).
# Everything between GI_BEGIN and GI_END is owned by Mnemo and regenerated on
# every non-dry-run install; user content outside the markers is preserved.
gi="$REPO_ROOT/.gitignore"
GI_BEGIN="# >>> Mnemo (generated) - do not edit this block manually <<<"
GI_END="# <<< Mnemo (generated) >>>"

# Base entries cover both the new .mnemo layout and the legacy .cursor layout.
ignore_lines=".mnemo/memory/memory.sqlite
.cursor/memory/memory.sqlite
.mnemo/mcp/cursor.mcp.json
.cursor/mcp.json"
if [ "$ENABLE_VECTOR" = "1" ]; then
  # Vector mode adds the SQLite side files, the sync lock dir and autonomy state.
  ignore_lines="$ignore_lines
.mnemo/memory/mnemo_vector.sqlite
.mnemo/memory/mnemo_vector.sqlite-journal
.mnemo/memory/mnemo_vector.sqlite-wal
.mnemo/memory/mnemo_vector.sqlite-shm
.mnemo/memory/.sync.lock
.mnemo/memory/.autonomy/
.cursor/memory/mnemo_vector.sqlite
.cursor/memory/mnemo_vector.sqlite-journal
.cursor/memory/mnemo_vector.sqlite-wal
.cursor/memory/mnemo_vector.sqlite-shm
.cursor/memory/.sync.lock
.cursor/memory/.autonomy/"
fi

if [ "$DRY_RUN" = "1" ]; then
  echo "[DRY RUN] WOULD UPDATE: $gi (managed Mnemo block)"
else
  new_block="$GI_BEGIN
$ignore_lines
$GI_END"

  if [ ! -f "$gi" ]; then
    printf "%s\n" "$new_block" >"$gi"
    echo "Created .gitignore with Mnemo managed block"
  elif grep -qF "$GI_BEGIN" "$gi" 2>/dev/null; then
    # Replace the existing managed block using awk (POSIX, PID-unique temp name).
    # Both markers are passed via -v and compared with exact string equality;
    # the previous version hard-coded the end marker as a regex, which could
    # silently drift from $GI_END. The END rule re-emits the block when the end
    # marker is missing, so a truncated block no longer swallows the rest of
    # the file and drops the managed entries.
    awk -v begin="$GI_BEGIN" -v end_mark="$GI_END" -v block="$new_block" '
      BEGIN { skipping=0; done=0 }
      $0 == begin { skipping=1 }
      skipping && $0 == end_mark {
        skipping=0
        if (!done) { print block; done=1 }
        next
      }
      !skipping { print }
      END { if (!done) print block }
    ' "$gi" > "$gi.tmp.$$" && mv "$gi.tmp.$$" "$gi"
    echo "Updated .gitignore managed block"
  else
    printf "\n%s\n" "$new_block" >> "$gi"
    echo "Added Mnemo managed block to .gitignore"
  fi
fi
|
|
2395
|
+
|
|
2396
|
+
# Make sure the permanent compatibility bridges exist and are healthy.
ensure_mnemo_bridges

# Memory scripts and both hooks must be executable; chmod failures (e.g. on a
# read-only checkout) are non-fatal.
chmod +x "$MEM_SCRIPTS_DIR/"*.sh "$GITHOOKS_DIR/pre-commit" "$GITHOOKS_DIR/post-commit" 2>/dev/null || true

# Point git at the portable .githooks directory so users don't have to do it
# manually; only touch the config when it is not already set.
if [ "$DRY_RUN" != "1" ] && [ -d "$REPO_ROOT/.git" ]; then
  _hooks_path_now="$(git -C "$REPO_ROOT" config core.hooksPath 2>/dev/null || true)"
  case "$_hooks_path_now" in
    .githooks)
      : # already configured — nothing to do
      ;;
    *)
      git -C "$REPO_ROOT" config core.hooksPath .githooks 2>/dev/null || true
      echo "Configured: git config core.hooksPath .githooks"
      ;;
  esac
fi
|
|
2409
|
+
|
|
2410
|
+
# Copy autonomy runtime modules from the installer templates when vector mode
# is enabled (skipped entirely in dry-run mode).
if [ "$ENABLE_VECTOR" = "1" ] && [ "$DRY_RUN" != "1" ]; then
  _autonomy_tpl="$_INSTALLER_DIR/scripts/memory/installer/templates/autonomy"
  _autonomy_dest="$MEM_SCRIPTS_DIR/autonomy"
  mkdir -p "$_autonomy_dest"
  if [ -d "$_autonomy_tpl" ]; then
    # Idempotency check: compare against the files actually shipped in the
    # template directory instead of a hard-coded name list. The previous list
    # included vault_policy.py, which the templates do not ship, so the
    # "missing" flag was always set and every run re-copied the modules and
    # reported WROTE even when nothing had changed.
    _autonomy_missing=0
    for _src in "$_autonomy_tpl"/*; do
      [ -f "$_src" ] || continue
      _f="${_src##*/}"
      if [ ! -f "$_autonomy_dest/$_f" ]; then
        _autonomy_missing=1
        break
      fi
    done
    if [ "$FORCE" = "1" ] || [ "$_autonomy_missing" = "1" ]; then
      cp -r "$_autonomy_tpl/." "$_autonomy_dest/" 2>/dev/null || true
      echo "WROTE: $MEM_SCRIPTS_DIR/autonomy/ (autonomy runtime modules)"
    else
      echo "SKIP (exists): $MEM_SCRIPTS_DIR/autonomy/ (autonomy runtime modules)"
    fi
  fi
fi
|
|
2431
|
+
|
|
2432
|
+
# Final summary: always list the next manual steps, then add vector-specific
# guidance depending on whether this was a real run or a dry run.
printf '%s\n' ""
printf '%s\n' "Setup complete. (Mnemo v$MNEMO_VERSION)"
printf '%s\n' "Next:"
printf '%s\n' " sh ./scripts/memory/rebuild-memory-index.sh"
printf '%s\n' " sh ./scripts/memory/lint-memory.sh"
if [ "$ENABLE_VECTOR" = "1" ]; then
  if [ "$DRY_RUN" != "1" ]; then
    printf '%s\n' " restart Cursor, then run: vector_health and vector_sync"
    printf '%s\n' ""
    printf '%s\n' "Vector tools enabled: vector_search, vector_sync, vector_forget, vector_health"
    printf '%s\n' "Important: post-commit uses shell env vars (export OPENAI_API_KEY/GEMINI_API_KEY)."
  else
    printf '%s\n' " (dry run) vector setup preview only; no MCP/hooks changed"
    printf '%s\n' ""
    printf '%s\n' "Vector tools previewed (dry run): no dependencies installed and no MCP/hooks were modified."
  fi
fi
|
|
2447
|
+
|