claude-mem-lite 2.3.0 → 2.3.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -1
- package/.claude-plugin/plugin.json +1 -1
- package/.mcp.json +2 -7
- package/commands/mem.md +7 -0
- package/commands/memory.md +51 -0
- package/commands/tools.md +78 -0
- package/commands/update.md +38 -0
- package/dispatch-inject.mjs +5 -4
- package/dispatch-workflow.mjs +155 -0
- package/dispatch.mjs +37 -15
- package/hook-handoff.mjs +222 -0
- package/hook-shared.mjs +10 -6
- package/hook.mjs +6 -5
- package/hooks/hooks.json +1 -1
- package/install.mjs +440 -11
- package/package.json +6 -1
- package/registry/preinstalled.json +0 -13
- package/registry-retriever.mjs +0 -3
- package/registry.mjs +1 -1
- package/schema.mjs +1 -0
- package/scripts/setup.sh +20 -1
- package/server.mjs +153 -159
- package/tool-schemas.mjs +4 -2
- package/utils.mjs +10 -2
package/hook-handoff.mjs
ADDED
|
@@ -0,0 +1,222 @@
|
|
|
1
|
+
// claude-mem-lite: Cross-session handoff extraction, detection, and injection
|
|
2
|
+
// Extracted for testability — hook.mjs has module-level side effects
|
|
3
|
+
|
|
4
|
+
import { basename } from 'path';
|
|
5
|
+
import { truncate, extractMatchKeywords, tokenizeHandoff, isSpecificTerm } from './utils.mjs';
|
|
6
|
+
import {
|
|
7
|
+
HANDOFF_EXPIRY_CLEAR, HANDOFF_EXPIRY_EXIT, HANDOFF_MATCH_THRESHOLD, CONTINUE_KEYWORDS,
|
|
8
|
+
} from './hook-shared.mjs';
|
|
9
|
+
|
|
10
|
+
/**
 * Capture a cross-session handoff snapshot into the session_handoffs table.
 * Invoked synchronously from handleStop (/exit) or handleSessionStart (/clear).
 * @param {Database} db Opened main database
 * @param {string} sessionId Session being handed off
 * @param {string} project Project identifier
 * @param {'clear'|'exit'} type Handoff type
 * @param {object|null} episodeSnapshot Episode buffer captured before flushing
 */
export function buildAndSaveHandoff(db, sessionId, project, type, episodeSnapshot) {
  // 1. Working objective — derived from the session's first user prompts.
  const promptRows = db.prepare(`
    SELECT prompt_text FROM user_prompts
    WHERE content_session_id = ?
    ORDER BY prompt_number ASC LIMIT 5
  `).all(sessionId);
  if (promptRows.length === 0) return; // Empty session — nothing to hand off

  const workingOn = promptRows.map(r => truncate(r.prompt_text, 200)).join(' → ');

  // 2. Completed work — recent uncompressed observations (narrative included for richness).
  const completed = db.prepare(`
    SELECT title, type, narrative FROM observations
    WHERE memory_session_id = ? AND COALESCE(compressed_into, 0) = 0
    ORDER BY created_at_epoch DESC LIMIT 15
  `).all(sessionId);

  // 3. Unfinished work — significant/error entries from the episode snapshot.
  let unfinished = '';
  const entries = episodeSnapshot?.entries;
  if (entries) {
    const pending = entries
      .filter(e => e.isSignificant || e.isError)
      .map(e => e.desc);
    if (pending.length > 0) unfinished = pending.join('; ');
  }
  // Only the most recent bugfix is an "unfinished" signal (earlier ones are likely resolved)
  if (!unfinished) {
    const latestBugfix = completed.find(o => o.type === 'bugfix');
    if (latestBugfix) unfinished = latestBugfix.title;
  }
  // Enrich unfinished with full session edit history from observation narratives.
  // Since handoff is UPSERT (max 2 rows per project), storing more data is free.
  const narratives = completed.map(c => c.narrative).filter(Boolean);
  if (narratives.length > 0) {
    unfinished = [unfinished, narratives.join('\n')].filter(Boolean).join('\n---\n');
  }

  // 4. Key files — union of episode snapshot files and observation files_modified.
  const fileSet = new Set(episodeSnapshot?.files || []);
  const fileRows = db.prepare(`
    SELECT files_modified FROM observations
    WHERE memory_session_id = ? AND files_modified IS NOT NULL
    ORDER BY created_at_epoch DESC LIMIT 10
  `).all(sessionId);
  for (const { files_modified } of fileRows) {
    // Stored as a JSON array; tolerate malformed rows.
    try {
      for (const f of JSON.parse(files_modified)) fileSet.add(f);
    } catch {}
  }

  // 5. Key decisions — high-importance, uncompressed observations.
  const decisions = db.prepare(`
    SELECT title FROM observations
    WHERE memory_session_id = ? AND COALESCE(importance, 1) >= 2
    AND COALESCE(compressed_into, 0) = 0
    ORDER BY created_at_epoch DESC LIMIT 5
  `).all(sessionId);

  // 6. Match keywords for later continuation detection.
  const corpus = [workingOn, ...completed.map(c => c.title).filter(Boolean), unfinished].join(' ');
  const keywords = extractMatchKeywords(corpus, [...fileSet]);

  // UPSERT — at most one row per (project, type).
  db.prepare(`
    INSERT INTO session_handoffs (project, type, session_id, working_on, completed, unfinished, key_files, key_decisions, match_keywords, created_at_epoch)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(project, type) DO UPDATE SET
      session_id = excluded.session_id,
      working_on = excluded.working_on,
      completed = excluded.completed,
      unfinished = excluded.unfinished,
      key_files = excluded.key_files,
      key_decisions = excluded.key_decisions,
      match_keywords = excluded.match_keywords,
      created_at_epoch = excluded.created_at_epoch
  `).run(
    project, type, sessionId,
    truncate(workingOn, 1000),
    completed.map(c => `[${c.type}] ${c.title}`).join('\n'),
    truncate(unfinished, 3000),
    JSON.stringify([...fileSet].slice(0, 20)),
    decisions.map(d => d.title).join('\n'),
    keywords,
    Date.now()
  );
}
|
|
108
|
+
|
|
109
|
+
/**
 * Decide whether the user's prompt continues previous work on this project.
 * Stage 1: Explicit keyword match (zero false positives).
 * Stage 2: FTS5-style weighted term overlap against stored handoff keywords.
 * @param {Database} db Opened main database
 * @param {string} promptText User's prompt text
 * @param {string} project Project identifier
 * @returns {boolean}
 */
export function detectContinuationIntent(db, promptText, project) {
  // Stage 1: Explicit keyword match — always works, even without handoff
  if (CONTINUE_KEYWORDS.test(promptText)) return true;

  // Stage 2: term overlap requires a stored, non-expired handoff.
  const rows = db.prepare(`
    SELECT type, match_keywords, created_at_epoch FROM session_handoffs
    WHERE project = ? ORDER BY created_at_epoch DESC
  `).all(project);
  if (rows.length === 0) return false;

  // Discard expired handoffs ('clear' expires much sooner than 'exit').
  const now = Date.now();
  const fresh = rows.filter(({ type, created_at_epoch }) => {
    const limit = type === 'clear' ? HANDOFF_EXPIRY_CLEAR : HANDOFF_EXPIRY_EXIT;
    return now - created_at_epoch <= limit;
  });
  if (fresh.length === 0) return false;

  // Score the prompt against the most recent valid handoff's keywords.
  // Specific terms (e.g. identifiers, filenames) count double.
  const keywordSet = new Set(tokenizeHandoff(fresh[0].match_keywords));
  let score = 0;
  for (const token of tokenizeHandoff(promptText)) {
    if (keywordSet.has(token)) score += isSpecificTerm(token) ? 2 : 1;
  }
  return score >= HANDOFF_MATCH_THRESHOLD;
}
|
|
152
|
+
|
|
153
|
+
/**
 * Render handoff injection text for stdout at session start.
 * Reads the most recent non-expired handoff plus an optional session summary.
 * @param {Database} db Opened main database
 * @param {string} project Project identifier
 * @returns {string|null} Injection text or null if no valid handoff exists
 */
export function renderHandoffInjection(db, project) {
  const now = Date.now();
  // Fetch recent handoffs and find the most recent non-expired one.
  // A newer but expired 'clear' handoff (1h) must not shadow a still-valid 'exit' handoff (7d).
  const handoffs = db.prepare(`
    SELECT * FROM session_handoffs
    WHERE project = ? ORDER BY created_at_epoch DESC LIMIT 5
  `).all(project);
  const handoff = handoffs.find(h => {
    const maxAge = h.type === 'clear' ? HANDOFF_EXPIRY_CLEAR : HANDOFF_EXPIRY_EXIT;
    return now - h.created_at_epoch <= maxAge;
  });
  if (!handoff) return null;

  // Human-readable age. Reuse `now` so the header age is consistent with the
  // expiry check above (previously Date.now() was sampled a second time).
  const ageSec = Math.round((now - handoff.created_at_epoch) / 1000);
  const ageStr = ageSec < 60 ? `${ageSec}s` :
    ageSec < 3600 ? `${Math.round(ageSec / 60)}m` :
    ageSec < 86400 ? `${Math.round(ageSec / 3600)}h` :
    `${Math.round(ageSec / 86400)}d`;

  const lines = [`<session-handoff source="${handoff.type}" age="${ageStr}">`];

  if (handoff.working_on) {
    lines.push('## Working On', handoff.working_on, '');
  }
  if (handoff.completed) {
    lines.push('## Completed', ...handoff.completed.split('\n').map(l => `- ${l}`), '');
  }
  if (handoff.unfinished) {
    // FIX: `unfinished` mixes '; '-joined episode entries with '\n'-joined
    // narrative history and '\n---\n' separators (see buildAndSaveHandoff).
    // Splitting only on '; ' rendered the narrative block as one multi-line
    // bullet with raw '---' lines. Split on both delimiters and drop the
    // separator/empty fragments so each item becomes its own bullet.
    const items = handoff.unfinished
      .split(/\n|; /)
      .map(s => s.trim())
      .filter(s => s && s !== '---');
    lines.push('## Unfinished', ...items.map(l => `- ${l}`), '');
  }
  if (handoff.key_files) {
    // key_files is a JSON array of paths; tolerate malformed rows.
    try {
      const files = JSON.parse(handoff.key_files);
      if (files.length > 0) lines.push('## Key Files', files.map(f => basename(f)).join(', '), '');
    } catch {}
  }
  if (handoff.key_decisions) {
    lines.push('## Key Decisions', ...handoff.key_decisions.split('\n').map(l => `- ${l}`), '');
  }

  lines.push('</session-handoff>');

  // Append session summary if available (long-gap enrichment). Best-effort:
  // swallow errors so a missing session_summaries table never blocks injection.
  try {
    const summary = db.prepare(`
      SELECT completed, next_steps, remaining_items FROM session_summaries
      WHERE memory_session_id = ? AND project = ?
      ORDER BY created_at_epoch DESC LIMIT 1
    `).get(handoff.session_id, project);
    if (summary && (summary.completed || summary.next_steps || summary.remaining_items)) {
      lines.push('');
      lines.push('<session-summary source="haiku">');
      if (summary.completed) lines.push(summary.completed);
      if (summary.remaining_items) lines.push(`Remaining: ${summary.remaining_items}`);
      if (summary.next_steps) lines.push(`Next steps: ${summary.next_steps}`);
      lines.push('</session-summary>');
    }
  } catch {}

  return lines.join('\n');
}
|
package/hook-shared.mjs
CHANGED
|
@@ -6,7 +6,7 @@ import { randomUUID } from 'crypto';
|
|
|
6
6
|
import { join } from 'path';
|
|
7
7
|
import { existsSync, readFileSync, writeFileSync, appendFileSync, mkdirSync, renameSync, unlinkSync } from 'fs';
|
|
8
8
|
import { inferProject, debugCatch } from './utils.mjs';
|
|
9
|
-
import { ensureDb, DB_DIR } from './schema.mjs';
|
|
9
|
+
import { ensureDb, DB_DIR, REGISTRY_DB_PATH } from './schema.mjs';
|
|
10
10
|
import { ensureRegistryDb } from './registry.mjs';
|
|
11
11
|
import { getClaudePath as getClaudePathShared, resolveModel as resolveModelShared } from './haiku-client.mjs';
|
|
12
12
|
|
|
@@ -23,14 +23,14 @@ export const STALE_SESSION_MS = 24 * 60 * 60 * 1000; // 24h
|
|
|
23
23
|
export const STALE_LOCK_MS = 30000; // 30s
|
|
24
24
|
export const DEDUP_WINDOW_MS = 5 * 60 * 1000; // 5 min (title dedup)
|
|
25
25
|
export const RELATED_OBS_WINDOW_MS = 7 * 86400000; // 7 days
|
|
26
|
-
export const FALLBACK_OBS_WINDOW_MS =
|
|
26
|
+
export const FALLBACK_OBS_WINDOW_MS = RELATED_OBS_WINDOW_MS; // same window
|
|
27
27
|
export const RESOURCE_RESCAN_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
|
|
28
28
|
|
|
29
29
|
// Handoff system constants
|
|
30
30
|
export const HANDOFF_EXPIRY_CLEAR = 3600000; // 1 hour
|
|
31
31
|
export const HANDOFF_EXPIRY_EXIT = 7 * 24 * 60 * 60 * 1000; // 7 days
|
|
32
32
|
export const HANDOFF_MATCH_THRESHOLD = 3; // min weighted score
|
|
33
|
-
export const CONTINUE_KEYWORDS = /继续|接着|上次|之前的|前面的|刚才|\bcontinue\b|\bresume\b|\bwhere[\s
|
|
33
|
+
export const CONTINUE_KEYWORDS = /继续|接着|上次|之前的|前面的|刚才|\bcontinue\b|\bresume\b|\bwhere[\s-]+we[\s-]+left\b|\bpick[\s-]+up\b|\bcarry[\s-]+on\b/i;
|
|
34
34
|
|
|
35
35
|
// Ensure runtime directory exists
|
|
36
36
|
try { if (!existsSync(RUNTIME_DIR)) mkdirSync(RUNTIME_DIR, { recursive: true }); } catch {}
|
|
@@ -70,8 +70,6 @@ export function openDb() {
|
|
|
70
70
|
}
|
|
71
71
|
|
|
72
72
|
// ─── Registry Database (dispatch system) ─────────────────────────────────────
|
|
73
|
-
|
|
74
|
-
const REGISTRY_DB_PATH = join(DB_DIR, 'resource-registry.db');
|
|
75
73
|
let _registryDb = null;
|
|
76
74
|
|
|
77
75
|
export function getRegistryDb() {
|
|
@@ -101,6 +99,7 @@ export function callLLM(prompt, timeoutMs = 15000) {
|
|
|
101
99
|
} catch (e) {
|
|
102
100
|
const out = _extractResponseFromError(e);
|
|
103
101
|
if (out) return out;
|
|
102
|
+
debugCatch(e, 'callLLM');
|
|
104
103
|
return null;
|
|
105
104
|
}
|
|
106
105
|
}
|
|
@@ -201,7 +200,12 @@ export function peekToolEvents() {
|
|
|
201
200
|
/**
 * Recover a JSON response captured on a thrown child-process error.
 * Prefers error.stdout, then the second entry of error.output (spawnSync shape).
 * @param {Error} error Error thrown by the child-process call
 * @returns {string|null} The raw JSON string, or null if absent/malformed
 */
export function _extractResponseFromError(error) {
  const raw = error.stdout?.toString?.()?.trim() || error.output?.[1]?.toString?.()?.trim() || '';
  // Must at least look like a complete JSON object before we try to parse it.
  if (!raw || !raw.startsWith('{') || !raw.endsWith('}')) return null;
  try {
    const parsed = JSON.parse(raw);
    // Reject structurally incomplete responses (e.g. truncated mid-output)
    if (typeof parsed !== 'object' || parsed === null || Object.keys(parsed).length === 0) return null;
  } catch {
    return null;
  }
  return raw;
}
|
package/hook.mjs
CHANGED
|
@@ -11,6 +11,7 @@ import {
|
|
|
11
11
|
truncate, typeIcon, inferProject, detectBashSignificance,
|
|
12
12
|
extractErrorKeywords, extractFilePaths, isRelatedToEpisode,
|
|
13
13
|
makeEntryDesc, scrubSecrets, EDIT_TOOLS, debugCatch, debugLog, fmtTime,
|
|
14
|
+
COMPRESSED_AUTO,
|
|
14
15
|
} from './utils.mjs';
|
|
15
16
|
import {
|
|
16
17
|
readEpisodeRaw, episodeFile,
|
|
@@ -256,7 +257,7 @@ async function handlePostToolUse() {
|
|
|
256
257
|
appendToolEvent({
|
|
257
258
|
tool_name,
|
|
258
259
|
tool_input: toolInput,
|
|
259
|
-
tool_response: (tool_name === 'Bash' && bashSig?.isError) ? resp.slice(0, 500) : '',
|
|
260
|
+
tool_response: (tool_name === 'Bash' && bashSig?.isError) ? scrubSecrets(resp.slice(0, 500)) : '',
|
|
260
261
|
});
|
|
261
262
|
}
|
|
262
263
|
} finally {
|
|
@@ -457,7 +458,7 @@ async function handleSessionStart() {
|
|
|
457
458
|
// Auto-compress: mark old low-importance observations as compressed (30+ days, importance=1)
|
|
458
459
|
// Lightweight: only marks rows, doesn't create summaries (full compression via mem_compress)
|
|
459
460
|
const compressed = db.prepare(`
|
|
460
|
-
UPDATE observations SET compressed_into =
|
|
461
|
+
UPDATE observations SET compressed_into = ${COMPRESSED_AUTO}
|
|
461
462
|
WHERE COALESCE(compressed_into, 0) = 0
|
|
462
463
|
AND importance = 1
|
|
463
464
|
AND created_at_epoch < ?
|
|
@@ -883,7 +884,7 @@ async function handleResourceScan() {
|
|
|
883
884
|
}
|
|
884
885
|
|
|
885
886
|
// Upsert changed resources with fallback metadata (no Haiku)
|
|
886
|
-
let
|
|
887
|
+
let upsertErrors = 0;
|
|
887
888
|
for (const res of toIndex) {
|
|
888
889
|
try {
|
|
889
890
|
upsertResource(rdb, {
|
|
@@ -898,7 +899,7 @@ async function handleResourceScan() {
|
|
|
898
899
|
trigger_patterns: `when user needs ${res.name.replace(/-/g, ' ').replace(/\//g, ' ')}`,
|
|
899
900
|
capability_summary: `${res.type}: ${res.name.replace(/-/g, ' ')}`,
|
|
900
901
|
});
|
|
901
|
-
} catch (e) { if (
|
|
902
|
+
} catch (e) { upsertErrors++; if (upsertErrors <= 3) debugCatch(e, `handleResourceScan-upsert[${upsertErrors}]`); }
|
|
902
903
|
}
|
|
903
904
|
|
|
904
905
|
// Disable resources no longer on filesystem
|
|
@@ -921,7 +922,7 @@ function readStdin() {
|
|
|
921
922
|
const MAX_STDIN = 256 * 1024; // 256KB — large tool responses are truncated
|
|
922
923
|
return new Promise((resolve, reject) => {
|
|
923
924
|
let data = '';
|
|
924
|
-
const timeout = setTimeout(() => { process.stdin.destroy(); reject(new Error('timeout')); }, 3000);
|
|
925
|
+
const timeout = setTimeout(() => { debugLog('WARN', 'readStdin', 'stdin timeout after 3s — event dropped'); process.stdin.destroy(); reject(new Error('timeout')); }, 3000);
|
|
925
926
|
process.stdin.setEncoding('utf8');
|
|
926
927
|
process.stdin.on('data', chunk => {
|
|
927
928
|
data += chunk;
|