@lmaksym/agent-mem 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/commands/context.md +24 -0
- package/.claude/skills/agent-mem/SKILL.md +66 -0
- package/.claude/skills/agent-mem/references/branching-merging.md +34 -0
- package/.claude/skills/agent-mem/references/coexistence.md +19 -0
- package/.claude/skills/agent-mem/references/collaboration.md +33 -0
- package/.claude/skills/agent-mem/references/reflection-compaction.md +104 -0
- package/.claude/skills/agent-mem/references/sub-agent-patterns.md +60 -0
- package/LICENSE +21 -0
- package/README.md +235 -0
- package/bin/agent-context.js +95 -0
- package/bin/parse-args.js +85 -0
- package/package.json +58 -0
- package/src/commands/branch.js +57 -0
- package/src/commands/branch.test.js +91 -0
- package/src/commands/branches.js +34 -0
- package/src/commands/commit.js +55 -0
- package/src/commands/compact.js +307 -0
- package/src/commands/compact.test.js +110 -0
- package/src/commands/config.js +47 -0
- package/src/commands/core.test.js +166 -0
- package/src/commands/diff.js +157 -0
- package/src/commands/diff.test.js +64 -0
- package/src/commands/forget.js +77 -0
- package/src/commands/forget.test.js +68 -0
- package/src/commands/help.js +99 -0
- package/src/commands/import.js +83 -0
- package/src/commands/init.js +269 -0
- package/src/commands/init.test.js +80 -0
- package/src/commands/lesson.js +95 -0
- package/src/commands/lesson.test.js +93 -0
- package/src/commands/merge.js +105 -0
- package/src/commands/pin.js +34 -0
- package/src/commands/pull.js +80 -0
- package/src/commands/push.js +80 -0
- package/src/commands/read.js +62 -0
- package/src/commands/reflect.js +328 -0
- package/src/commands/remember.js +95 -0
- package/src/commands/resolve.js +230 -0
- package/src/commands/resolve.test.js +167 -0
- package/src/commands/search.js +70 -0
- package/src/commands/share.js +65 -0
- package/src/commands/snapshot.js +106 -0
- package/src/commands/status.js +37 -0
- package/src/commands/switch.js +31 -0
- package/src/commands/sync.js +328 -0
- package/src/commands/track.js +61 -0
- package/src/commands/unpin.js +28 -0
- package/src/commands/write.js +58 -0
- package/src/core/auto-commit.js +22 -0
- package/src/core/config.js +93 -0
- package/src/core/context-root.js +28 -0
- package/src/core/fs.js +137 -0
- package/src/core/git.js +182 -0
- package/src/core/importers.js +210 -0
- package/src/core/lock.js +62 -0
- package/src/core/reflect-defrag.js +287 -0
- package/src/core/reflect-gather.js +360 -0
- package/src/core/reflect-parse.js +168 -0
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
import { existsSync, readdirSync, statSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { readContextFile, writeContextFile, parseFrontmatter } from './fs.js';
|
|
4
|
+
import { readConfig } from './config.js';
|
|
5
|
+
import { commitContext } from './git.js';
|
|
6
|
+
import { getReflectionFiles } from './reflect-gather.js';
|
|
7
|
+
|
|
8
|
+
/**
 * Tokenize a string into a set of lowercase words.
 * Non-alphanumeric characters act as separators; only tokens of three or
 * more characters are kept (short stop-words are dropped).
 */
function wordSet(text) {
  const tokens = text
    .toLowerCase()
    .replace(/[^a-z0-9\s]/g, ' ')
    .split(/\s+/);
  const words = new Set();
  for (const token of tokens) {
    if (token.length > 2) words.add(token);
  }
  return words;
}
|
|
20
|
+
|
|
21
|
+
/**
 * Jaccard similarity between two word sets: |A ∩ B| / |A ∪ B|.
 * Two empty sets are treated as dissimilar (returns 0).
 */
function jaccard(setA, setB) {
  if (setA.size === 0 && setB.size === 0) return 0;
  const shared = [...setA].filter((w) => setB.has(w)).length;
  const union = setA.size + setB.size - shared;
  return union === 0 ? 0 : shared / union;
}
|
|
33
|
+
|
|
34
|
+
/**
 * Parse entry lines from a memory file.
 * An entry line looks like `- [YYYY-MM-DD ...] text`.
 * Returns an array of { line, date, text } with 1-based line numbers.
 */
function parseEntries(content) {
  if (!content) return [];
  const entryPattern = /^- \[(\d{4}-\d{2}-\d{2})[\s\d:]*\]\s*(.+)/;
  const entries = [];
  content.split('\n').forEach((lineText, idx) => {
    const m = lineText.match(entryPattern);
    if (m) {
      entries.push({ line: idx + 1, date: m[1], text: m[2] });
    }
  });
  return entries;
}
|
|
49
|
+
|
|
50
|
+
/**
 * Collect text from reflection sections that "reaffirm" memory entries —
 * the "Patterns Identified" and "Decisions Validated" headings.
 * Returns the concatenated section text, lowercased so callers can do
 * case-insensitive substring matching against it.
 */
function getReaffirmedText(ctxDir) {
  const sections = [];
  for (const file of getReflectionFiles(ctxDir)) {
    const raw = readContextFile(ctxDir, file);
    if (!raw) continue;
    // Only the reaffirming sections of each reflection are relevant.
    const { content } = parseFrontmatter(raw);
    const matches = content.match(
      /## (?:Patterns Identified|Decisions Validated)\n([\s\S]*?)(?=\n## |\n$|$)/g,
    );
    if (matches) {
      sections.push(...matches);
    }
  }
  return sections.join('\n').toLowerCase();
}
|
|
70
|
+
|
|
71
|
+
/**
 * Run defrag analysis on memory files under `<ctxDir>/memory`.
 *
 * Flags four issue categories:
 *  - oversized:  files above the entry-count or byte-size thresholds
 *  - duplicates: entry pairs within one file with Jaccard similarity >= 0.6
 *  - stale:      entries older than `stale_days` that no reflection reaffirms
 *  - structural: missing frontmatter description, or effectively-empty files
 *
 * Thresholds come from config (`reflection.defrag_threshold`,
 * `reflection.defrag_size_kb`, `reflection.stale_days`) with defaults
 * 50 entries / 10KB / 30 days.
 *
 * @param {string} ctxDir - Context root directory.
 * @returns {{oversized: Array, duplicates: Array, stale: Array, structural: Array,
 *            health: 'GOOD'|'FAIR'|'NEEDS_ATTENTION', issueCount: number}}
 */
export function analyzeDefrag(ctxDir) {
  const config = readConfig(ctxDir);
  const threshold = config.reflection?.defrag_threshold || 50;
  const sizeKb = config.reflection?.defrag_size_kb || 10;
  const staleDays = config.reflection?.stale_days || 30;

  const memDir = join(ctxDir, 'memory');
  if (!existsSync(memDir)) {
    return {
      oversized: [],
      duplicates: [],
      stale: [],
      structural: [],
      health: 'GOOD',
      issueCount: 0,
    };
  }

  const files = readdirSync(memDir)
    .filter((n) => n.endsWith('.md') && !n.startsWith('.'))
    .sort();

  const oversized = [];
  const duplicates = [];
  const stale = [];
  const structural = [];

  const reaffirmed = getReaffirmedText(ctxDir);
  const now = new Date();
  const staleThreshold = new Date(now.getTime() - staleDays * 24 * 60 * 60 * 1000);

  for (const name of files) {
    const relPath = `memory/${name}`;
    const content = readContextFile(ctxDir, relPath);
    if (!content) continue;

    const stat = statSync(join(memDir, name));
    const entries = parseEntries(content);
    const { description } = parseFrontmatter(content);

    // Check oversized
    if (entries.length > threshold || stat.size > sizeKb * 1024) {
      oversized.push({
        file: relPath,
        entries: entries.length,
        size: stat.size,
        sizeFormatted: `${(stat.size / 1024).toFixed(1)}KB`,
      });
    }

    // Check structural issues
    if (!description && entries.length > 0) {
      structural.push({ file: relPath, issue: 'missing frontmatter (description)' });
    }
    if (entries.length === 0 && content.trim().split('\n').length <= 6) {
      structural.push({ file: relPath, issue: 'empty (no entries)' });
    }

    // Check duplicates within the same file.
    // Tokenize each entry exactly once up front; the previous version
    // recomputed the word set of entry j on every (i, j) pair, i.e. O(n^2)
    // tokenizations per file.
    const words = entries.map((e) => wordSet(e.text));
    for (let i = 0; i < entries.length; i++) {
      for (let j = i + 1; j < entries.length; j++) {
        const sim = jaccard(words[i], words[j]);
        if (sim >= 0.6) {
          duplicates.push({
            file: relPath,
            lineA: entries[i].line,
            lineB: entries[j].line,
            textA: entries[i].text.slice(0, 80),
            textB: entries[j].text.slice(0, 80),
            similarity: sim,
          });
        }
      }
    }

    // Check stale entries
    for (const entry of entries) {
      const entryDate = new Date(entry.date);
      if (entryDate < staleThreshold) {
        // An entry counts as "reaffirmed" when a reflection section quotes
        // its first 30 characters (case-insensitive).
        const isReaffirmed = reaffirmed.includes(entry.text.toLowerCase().slice(0, 30));
        if (!isReaffirmed) {
          stale.push({
            file: relPath,
            line: entry.line,
            date: entry.date,
            text: entry.text.slice(0, 80),
          });
        }
      }
    }
  }

  const issueCount = oversized.length + duplicates.length + stale.length + structural.length;
  const health = issueCount === 0 ? 'GOOD' : issueCount <= 3 ? 'FAIR' : 'NEEDS_ATTENTION';

  return { oversized, duplicates, stale, structural, health, issueCount };
}
|
|
176
|
+
|
|
177
|
+
/**
 * Format defrag analysis as human-readable report text.
 *
 * @param {object} analysis - Result of analyzeDefrag().
 * @param {boolean} isDryRun - When true, appends a "(dry run)" marker to the header.
 * @param {number} [staleDays=30] - Stale threshold (in days) shown in the stale
 *   section header; pass the configured `reflection.stale_days` for accuracy.
 * @returns {string} Multi-line report.
 */
export function formatDefragOutput(analysis, isDryRun, staleDays = 30) {
  const lines = [];
  const marker = isDryRun ? ' (dry run — no changes)' : '';
  lines.push(`🔧 DEFRAG ANALYSIS${marker}`);
  lines.push('');

  // Oversized
  if (analysis.oversized.length > 0) {
    lines.push('OVERSIZED FILES:');
    for (const o of analysis.oversized) {
      lines.push(`  ${o.file} — ${o.entries} entries, ${o.sizeFormatted}`);
      lines.push(`    ⤷ Suggest: split by domain or time period`);
    }
    lines.push('');
  }

  // Duplicates
  if (analysis.duplicates.length > 0) {
    lines.push('POTENTIAL DUPLICATES:');
    for (const d of analysis.duplicates) {
      lines.push(
        `  ${d.file} line ${d.lineA} ↔ line ${d.lineB} (${(d.similarity * 100).toFixed(0)}% similar)`,
      );
      lines.push(`    "${d.textA}"`);
      lines.push(`    "${d.textB}"`);
      lines.push(`    ⤷ Suggest: merge into single, more specific entry`);
    }
    lines.push('');
  }

  // Stale — the header shows the actual threshold. (The previous version
  // contained a no-op ternary `${analysis.stale[0] ? '' : ''}` and always
  // printed a hard-coded 30.)
  if (analysis.stale.length > 0) {
    lines.push(`STALE ENTRIES (>${staleDays} days, unreferenced):`);
    for (const s of analysis.stale) {
      lines.push(`  ${s.file} line ${s.line} — [${s.date}] "${s.text}"`);
    }
    lines.push(`  ⤷ Suggest: archive or remove if no longer relevant`);
    lines.push('');
  }

  // Structural
  if (analysis.structural.length > 0) {
    lines.push('STRUCTURAL ISSUES:');
    for (const s of analysis.structural) {
      lines.push(`  ${s.file} — ${s.issue}`);
    }
    lines.push('');
  }

  if (analysis.issueCount === 0) {
    lines.push('No issues found.');
    lines.push('');
  }

  lines.push(`Memory health: ${analysis.health} (${analysis.issueCount} issues)`);
  lines.push('');

  // Instructions
  lines.push('═══ INSTRUCTIONS ═══');
  lines.push('To act on suggestions:');
  lines.push('  amem write memory/<file>.md "<cleaned content>"');
  lines.push('  amem commit "defrag: reorganized memory files"');
  lines.push('');
  lines.push('Or run a full reflect cycle to address holistically:');
  lines.push('  amem reflect gather --deep');

  return lines.join('\n');
}
|
|
248
|
+
|
|
249
|
+
/**
 * Apply non-destructive stale markers to flagged entries.
 * For each flagged entry a `[<today> STALE]` note line is inserted directly
 * below it; entries whose line already contains `[STALE]` are skipped.
 *
 * @param {string} ctxDir - Context root directory.
 * @param {Array<{file: string, line: number}>} staleEntries - Entries flagged as stale.
 * @returns {number} Count of entries newly marked.
 */
export function applyStaleMarkers(ctxDir, staleEntries) {
  if (!staleEntries.length) return 0;

  const today = new Date().toISOString().slice(0, 10);
  let marked = 0;

  // Group flagged entries by their containing file.
  const byFile = new Map();
  for (const entry of staleEntries) {
    const group = byFile.get(entry.file);
    if (group) {
      group.push(entry);
    } else {
      byFile.set(entry.file, [entry]);
    }
  }

  for (const [file, entries] of byFile) {
    const content = readContextFile(ctxDir, file);
    if (!content) continue;

    const fileLines = content.split('\n');
    // Visit line numbers highest-first so earlier insertions never shift
    // the positions still to be processed.
    const lineNums = entries.map((e) => e.line).sort((a, b) => b - a);

    for (const lineNum of lineNums) {
      const idx = lineNum - 1;
      if (idx >= 0 && idx < fileLines.length && !fileLines[idx].includes('[STALE]')) {
        // Insert the marker immediately after the flagged entry line.
        fileLines.splice(
          idx + 1,
          0,
          `- [${today} STALE] ^^^ flagged by reflection — may be outdated`,
        );
        marked++;
      }
    }

    writeContextFile(ctxDir, file, fileLines.join('\n'));
  }

  return marked;
}
|
|
@@ -0,0 +1,360 @@
|
|
|
1
|
+
import { existsSync, readdirSync, statSync, readFileSync } from 'node:fs';
|
|
2
|
+
import { join } from 'node:path';
|
|
3
|
+
import { readContextFile, writeContextFile, parseFrontmatter, listFiles } from './fs.js';
|
|
4
|
+
import { readConfig } from './config.js';
|
|
5
|
+
import {
|
|
6
|
+
gitLog,
|
|
7
|
+
gitDiffStat,
|
|
8
|
+
gitDiffFiles,
|
|
9
|
+
commitCountSince,
|
|
10
|
+
firstCommit,
|
|
11
|
+
lastCommit,
|
|
12
|
+
} from './git.js';
|
|
13
|
+
|
|
14
|
+
/**
 * Determine the "since" reference for the reflection window.
 * Priority: --since flag > saved .reflect-state.json hash > most recent
 * reflection file's `last_commit_hash` frontmatter > first commit.
 *
 * @param {string} ctxDir - Context root directory.
 * @param {{since?: string}} flags - Parsed CLI flags.
 * @returns {*} A git ref/hash to diff from (whatever firstCommit returns as fallback).
 */
export function resolveSinceRef(ctxDir, flags) {
  // Explicit --since flag wins.
  if (flags.since) return flags.since;

  // Check .reflect-state.json for last saved state.
  const stateRaw = readContextFile(ctxDir, '.reflect-state.json');
  if (stateRaw) {
    try {
      const state = JSON.parse(stateRaw);
      if (state.last_commit_hash) return state.last_commit_hash;
    } catch {
      // Corrupt state file — deliberately ignored; fall through to the
      // reflection frontmatter and first-commit fallbacks below.
    }
  }

  // Check the most recent reflection file's frontmatter.
  const reflections = getReflectionFiles(ctxDir);
  if (reflections.length > 0) {
    const latest = readContextFile(ctxDir, reflections[reflections.length - 1]);
    if (latest) {
      // parseFrontmatter doesn't expose last_commit_hash, so read the raw
      // frontmatter block and extract it directly. (The previous version
      // also destructured an unused `description` here — dead code, removed.)
      const match = latest.match(/^---\n([\s\S]*?)\n---/);
      if (match) {
        const hashMatch = match[1].match(/last_commit_hash:\s*['"]?(\w+)['"]?/);
        if (hashMatch) return hashMatch[1];
      }
    }
  }

  // Fall back to the repository's first commit.
  return firstCommit(ctxDir);
}
|
|
49
|
+
|
|
50
|
+
/**
 * Get the sorted list of reflection file paths (relative to ctxDir).
 * Only visible `.md` files under `reflections/` are returned.
 */
export function getReflectionFiles(ctxDir) {
  const reflectionsDir = join(ctxDir, 'reflections');
  if (!existsSync(reflectionsDir)) return [];
  const names = readdirSync(reflectionsDir).filter(
    (n) => n.endsWith('.md') && !n.startsWith('.'),
  );
  names.sort();
  return names.map((n) => `reflections/${n}`);
}
|
|
61
|
+
|
|
62
|
+
/**
 * Count entries in a memory file: lines starting `- [` (bullet entries)
 * or `### [` (lesson headings).
 */
function countEntries(content) {
  if (!content) return 0;
  let total = 0;
  for (const line of content.split('\n')) {
    if (line.startsWith('- [') || line.startsWith('### [')) total++;
  }
  return total;
}
|
|
69
|
+
|
|
70
|
+
/**
 * Get the date of the last entry in a memory file, or null when the file
 * has no entries or the final entry carries no parseable YYYY-MM-DD date.
 */
function lastEntryDate(content) {
  if (!content) return null;
  const entryLines = content
    .split('\n')
    .filter((l) => /^- \[/.test(l) || /^### \[/.test(l));
  const lastLine = entryLines.at(-1);
  if (lastLine === undefined) return null;
  const match = lastLine.match(/^(?:- |### )\[(\d{4}-\d{2}-\d{2})/);
  return match ? match[1] : null;
}
|
|
80
|
+
|
|
81
|
+
/**
 * Analyze all memory files under `<ctxDir>/memory`.
 * Returns an array of { file, entries, size, lastDate, content },
 * ordered by file name.
 */
function analyzeMemoryFiles(ctxDir) {
  const memDir = join(ctxDir, 'memory');
  if (!existsSync(memDir)) return [];

  const names = readdirSync(memDir)
    .filter((n) => n.endsWith('.md') && !n.startsWith('.'))
    .sort();

  const results = [];
  for (const name of names) {
    const content = readContextFile(ctxDir, `memory/${name}`);
    results.push({
      file: `memory/${name}`,
      entries: countEntries(content),
      size: statSync(join(memDir, name)).size,
      lastDate: lastEntryDate(content),
      content,
    });
  }
  return results;
}
|
|
103
|
+
|
|
104
|
+
/**
 * Analyze branch lifecycle under `<ctxDir>/branches`.
 * A branch counts as merged when memory/decisions.md contains a
 * "Merged branch: <name>" entry (case-insensitive); otherwise it is active.
 *
 * @param {string} ctxDir - Context root directory.
 * @returns {{active: Array<{name: string, purpose: string, isCurrent: boolean}>,
 *            merged: Array<{name: string, purpose: string}>}}
 */
function analyzeBranches(ctxDir) {
  const branchDir = join(ctxDir, 'branches');
  if (!existsSync(branchDir)) return { active: [], merged: [] };

  const config = readConfig(ctxDir);
  const active = [];
  const merged = [];

  // decisions.md is the same for every branch — read it once instead of
  // re-reading it on every loop iteration as before.
  const decisions = readContextFile(ctxDir, 'memory/decisions.md') || '';

  for (const name of readdirSync(branchDir).filter((n) => !n.startsWith('.'))) {
    const purposeRaw = readContextFile(ctxDir, `branches/${name}/purpose.md`);
    // Purpose = first non-heading, non-frontmatter line of purpose.md,
    // falling back to the branch name.
    const purpose = purposeRaw
      ? purposeRaw
          .split('\n')
          .filter((l) => l.trim() && !l.startsWith('#') && !l.startsWith('---'))[0]
          ?.trim() || name
      : name;

    // Escape regex metacharacters so branch names like "fix(v1.2)" cannot
    // throw or silently mismatch when building the merge-marker pattern.
    const escaped = name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const mergePattern = new RegExp(`Merged branch: ${escaped}`, 'i');
    if (mergePattern.test(decisions)) {
      merged.push({ name, purpose });
    } else {
      active.push({ name, purpose, isCurrent: config.branch === name });
    }
  }

  return { active, merged };
}
|
|
136
|
+
|
|
137
|
+
/**
 * Get the most recent reflection's summary.
 * Returns { date, file, summary } or null when no reflection exists or the
 * latest one is unreadable. `summary` is the "## Summary" section body
 * (null when absent); `date` comes from frontmatter, falling back to the
 * reflection file's basename.
 */
function getLastReflectionSummary(ctxDir) {
  const files = getReflectionFiles(ctxDir);
  if (!files.length) return null;

  const latestFile = files[files.length - 1];
  const content = readContextFile(ctxDir, latestFile);
  if (!content) return null;

  // Pull the "## Summary" section text, if present.
  const { content: body } = parseFrontmatter(content);
  const summaryMatch = body.match(/## Summary\n([\s\S]*?)(?=\n## |\n$|$)/);

  // Prefer the frontmatter date; otherwise derive it from the file name.
  const dateMatch = content.match(/date:\s*['"]?(\d{4}-\d{2}-\d{2})['"]?/);
  let date;
  if (dateMatch) {
    date = dateMatch[1];
  } else {
    date = latestFile.replace('reflections/', '').replace('.md', '');
  }

  return {
    date,
    file: latestFile,
    summary: summaryMatch ? summaryMatch[1].trim() : null,
  };
}
|
|
160
|
+
|
|
161
|
+
/**
 * Format a byte count for display: values under 1KB as `<n>B`,
 * otherwise as kilobytes with one decimal place.
 */
function formatSize(bytes) {
  return bytes < 1024 ? `${bytes}B` : `${(bytes / 1024).toFixed(1)}KB`;
}
|
|
168
|
+
|
|
169
|
+
/**
 * Gather all reflection data and format the reflection prompt.
 *
 * Resolves the reflection window, collects recent git activity, the current
 * memory state, branch lifecycle and the previous reflection summary, writes
 * a breadcrumb state file (.reflect-state.json) for the save phase, and
 * returns the assembled prompt text.
 *
 * @param {string} ctxDir - Context root directory.
 * @param {{since?: string, deep?: boolean, compaction?: boolean}} flags
 * @returns {{empty: true, sinceRef: *}|{empty: false, prompt: string, state: object}}
 */
export function gatherReflectionPrompt(ctxDir, flags) {
  const sinceRef = resolveSinceRef(ctxDir, flags);
  const totalCommits = commitCountSince(ctxDir, sinceRef);

  // Nothing happened since the last reflection — nothing to reflect on.
  if (totalCommits === 0) {
    return { empty: true, sinceRef };
  }

  const commits = gitLog(ctxDir, sinceRef, 50);
  const diffStats = gitDiffStat(ctxDir, sinceRef);
  const memoryFiles = analyzeMemoryFiles(ctxDir);
  const branches = analyzeBranches(ctxDir);
  const lastReflection = getLastReflectionSummary(ctxDir);
  const last = lastCommit(ctxDir);

  // Window dates: end is today; start is taken from the final commit in the
  // gitLog listing (assumes newest-first ordering — TODO confirm in git.js).
  const windowEnd = new Date().toISOString().slice(0, 10);
  let windowStart = 'unknown';
  if (commits.length > 0) {
    windowStart = commits[commits.length - 1].date?.slice(0, 10) || 'unknown';
  }

  // Persist breadcrumb state so the save phase knows what was reviewed.
  const state = {
    window_start: windowStart,
    window_end: windowEnd,
    commits_reviewed: totalCommits,
    last_commit_hash: last?.hash || null,
    since_ref: sinceRef,
    gathered_at: new Date().toISOString(),
  };
  writeContextFile(ctxDir, '.reflect-state.json', JSON.stringify(state, null, 2));

  // Assemble the prompt section by section.
  const out = [];

  out.push(
    '🔍 REFLECTION INPUT',
    `Window: ${windowStart} → ${windowEnd} (${totalCommits} commits)`,
    `Last reflection: ${lastReflection ? `${lastReflection.date} (${lastReflection.file})` : 'none (first reflection)'}`,
    '',
  );

  // === RECENT ACTIVITY ===
  out.push('═══ RECENT ACTIVITY ═══', '');

  out.push(`COMMITS (${totalCommits}):`);
  commits.forEach((c, i) => {
    out.push(`  ${i + 1}. ${c.hash} | ${c.message} | ${c.date?.slice(0, 16) || ''}`);
  });
  if (totalCommits > 50) {
    out.push(`  ... and ${totalCommits - 50} earlier commits`);
  }
  out.push('');

  if (diffStats.length > 0) {
    out.push('FILES CHANGED:');
    for (const d of diffStats) {
      out.push(`  ${d.file} +${d.added} -${d.removed} lines`);
    }
    out.push('');
  }

  // Deep mode: include full diffs for memory files
  if (flags.deep && sinceRef) {
    const memoryDiff = gitDiffFiles(ctxDir, sinceRef, 'memory/');
    if (memoryDiff) {
      out.push('MEMORY DIFFS (--deep):', memoryDiff, '');
    }
  }

  // Branch lifecycle
  if (branches.merged.length > 0 || branches.active.length > 0) {
    out.push('BRANCHES:');
    for (const b of branches.merged) {
      out.push(`  MERGED: ${b.name} → "${b.purpose}"`);
    }
    for (const b of branches.active) {
      out.push(`  ACTIVE: ${b.name}${b.isCurrent ? ' *' : ''} → "${b.purpose}"`);
    }
    out.push('');
  }

  // === CURRENT MEMORY STATE ===
  out.push('═══ CURRENT MEMORY STATE ═══', '');

  if (memoryFiles.length === 0) {
    out.push('(no memory files yet)');
  } else {
    for (const mf of memoryFiles) {
      out.push(`${mf.file} (${mf.entries} entries, ${formatSize(mf.size)}):`);
      if (mf.entries === 0) {
        out.push('  (empty)');
      } else {
        // List every entry (bullets and lesson headings) for cross-reference.
        const entryLines = mf.content
          .split('\n')
          .filter((l) => /^- \[/.test(l) || /^### \[/.test(l));
        for (const entry of entryLines) {
          out.push(`  ${entry}`);
        }
      }
      out.push('');
    }
  }

  // === LAST REFLECTION ===
  out.push('═══ LAST REFLECTION ═══', '');
  if (lastReflection) {
    out.push(`(from ${lastReflection.file})`, lastReflection.summary || '(no summary)');
  } else {
    out.push(
      'First reflection — no prior context.',
      '',
      'This is your first reflection for this project. Focus on:',
      '1. Are the memory entries so far capturing the RIGHT things?',
      '2. Are any decisions already outdated?',
      '3. What implicit knowledge about this project have you NOT written down?',
      '4. Are the system/ files (conventions, project overview) still accurate?',
    );
  }
  out.push('');

  // === COMPACTION MODE ===
  if (flags.compaction) {
    out.push(
      '═══ COMPACTION MODE ═══',
      'Your context window is filling up. Focus on:',
      '1. Summarize work into concise status (not deep analysis)',
      '2. Identify memory entries to archive',
      '3. Identify system/ files to shorten or unpin',
      '4. After saving, consider unpinning less critical system files',
      '',
    );
  }

  // === REFLECTION QUESTIONS ===
  out.push(
    '═══ REFLECTION QUESTIONS ═══',
    '',
    '1. PATTERNS: What recurring approaches or successful strategies emerged?',
    '2. LESSONS: Were any problems debugged/fixed? Record each as a problem→resolution pair.',
    '3. CONTRADICTIONS: Do any new entries conflict with existing ones?',
    '4. GAPS: What important context is NOT yet captured?',
    '5. STALE: Are any existing entries outdated?',
    '6. THEMES: What overarching directions are emerging?',
    '',
  );

  // === INSTRUCTIONS ===
  out.push(
    '═══ INSTRUCTIONS ═══',
    '',
    'After reasoning, call:',
    '  amem reflect save --content "YOUR_REFLECTION"',
    '',
    'Use this format:',
    '',
    '## Patterns Identified',
    '- <pattern>',
    '',
    '## Decisions Validated',
    '- <decision confirmed by recent work>',
    '',
    '## Contradictions Found',
    '- <what conflicts, and resolution>',
    '',
    '## Stale Entries',
    '- <file>: <entry to flag>',
    '',
    '## Lessons Learned',
    '- type: lesson',
    '  text: <short title>',
    '  problem: <what went wrong>',
    '  resolution: <what fixed it>',
    '',
    '## Gaps Filled',
    '- type: decision|pattern|mistake|note',
    '  text: <new entry to add>',
    '',
    '## Themes',
    '- <overarching theme>',
    '',
    '## Summary',
    '<2-3 sentence summary>',
  );

  return { empty: false, prompt: out.join('\n'), state };
}
|