claude-mem-lite 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/hook-llm.mjs ADDED
@@ -0,0 +1,358 @@
1
+ // claude-mem-lite: Background LLM workers for episode extraction and session summaries
2
+ // Extracted from hook.mjs for testability and reduced complexity
3
+
4
+ import { basename } from 'path';
5
+ import { existsSync, readFileSync, unlinkSync, readdirSync } from 'fs';
6
+ import {
7
+ jaccardSimilarity, truncate, clampImportance, computeRuleImportance,
8
+ inferProject, parseJsonFromLLM,
9
+ computeMinHash, estimateJaccardFromMinHash, debugCatch, debugLog,
10
+ } from './utils.mjs';
11
+ import { acquireLLMSlot, releaseLLMSlot } from './hook-semaphore.mjs';
12
+ import {
13
+ RUNTIME_DIR, DEDUP_WINDOW_MS, RELATED_OBS_WINDOW_MS,
14
+ sessionFile, getSessionId, openDb, callLLM, sleep,
15
+ } from './hook-shared.mjs';
16
+
17
+ // ─── Save Observation to DB ─────────────────────────────────────────────────
18
+
19
/**
 * Persist one observation row, creating the session row on demand.
 * Applies two dedup tiers before inserting.
 *
 * @param {object} obs Extracted observation: { type, title, subtitle, narrative,
 *   concepts, facts, files, filesRead, importance } — fields optional; title is
 *   what the dedup tiers key on.
 * @param {string} [projectOverride] Project name; defaults to inferProject().
 * @param {string} [sessionIdOverride] Session id; defaults to getSessionId().
 * @param {object} [externalDb] Caller-owned DB handle; when supplied it is
 *   reused and NOT closed here.
 * @returns {number|null} Inserted observation rowid, or null when the DB is
 *   unavailable or the observation is judged a duplicate.
 */
export function saveObservation(obs, projectOverride, sessionIdOverride, externalDb) {
  const db = externalDb || openDb();
  if (!db) return null;

  try {
    const now = new Date();
    const project = projectOverride || inferProject();
    const sessionId = sessionIdOverride || getSessionId();

    // Ensure the session row exists; IGNORE makes this a no-op on repeat calls.
    db.prepare(`
      INSERT OR IGNORE INTO sdk_sessions (content_session_id, memory_session_id, project, started_at, started_at_epoch, status)
      VALUES (?, ?, ?, ?, ?, 'active')
    `).run(sessionId, sessionId, project, now.toISOString(), now.getTime());

    // Two-tier dedup
    // Tier 1 (fast): 5-min Jaccard on titles
    const fiveMinAgo = now.getTime() - DEDUP_WINDOW_MS;
    const recent = db.prepare(`
      SELECT title FROM observations
      WHERE project = ? AND created_at_epoch > ?
      ORDER BY created_at_epoch DESC LIMIT 10
    `).all(project, fiveMinAgo);

    // Token-set similarity > 0.7 against any recent title → duplicate, drop it.
    if (obs.title && recent.some(r => jaccardSimilarity(r.title, obs.title) > 0.7)) {
      return null;
    }

    // Tier 2 (slow): MinHash cross-session dedup (7-day window)
    // Signature covers title + narrative so re-worded titles still collide.
    const minhashSig = computeMinHash((obs.title || '') + ' ' + (obs.narrative || ''));
    if (minhashSig) {
      const sevenDaysAgo = now.getTime() - RELATED_OBS_WINDOW_MS;
      const recentSigs = db.prepare(`
        SELECT minhash_sig FROM observations
        WHERE project = ? AND created_at_epoch > ? AND minhash_sig IS NOT NULL
        ORDER BY created_at_epoch DESC LIMIT 100
      `).all(project, sevenDaysAgo);

      // Estimated Jaccard > 0.8 on the stored signatures → duplicate.
      if (recentSigs.some(r => estimateJaccardFromMinHash(minhashSig, r.minhash_sig) > 0.8)) {
        return null;
      }
    }

    // Flatten concepts/facts into a single searchable `text` column.
    const conceptsText = Array.isArray(obs.concepts) ? obs.concepts.join(' ') : '';
    const factsText = Array.isArray(obs.facts) ? obs.facts.join(' ') : '';
    const textField = [conceptsText, factsText].filter(Boolean).join(' ');

    const result = db.prepare(`
      INSERT INTO observations (memory_session_id, project, text, type, title, subtitle, narrative, concepts, facts, files_read, files_modified, importance, minhash_sig, created_at, created_at_epoch)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `).run(
      sessionId, project,
      textField, obs.type, obs.title, obs.subtitle || '',
      obs.narrative || '',
      conceptsText,
      factsText,
      JSON.stringify(obs.filesRead || []),
      JSON.stringify(obs.files || []),
      obs.importance ?? 1,
      minhashSig,
      now.toISOString(), now.getTime()
    );
    return Number(result.lastInsertRowid);
  } finally {
    // Only close connections opened here; external handles belong to the caller.
    if (!externalDb) db.close();
  }
}
85
+
86
+ // ─── Related Observation Linking ─────────────────────────────────────────────
87
+
88
/**
 * Link a freshly saved observation to related prior observations.
 * Candidates come from two strategies — FTS5 title similarity and
 * modified-file overlap — and links are written bidirectionally,
 * capped at 5 new links per call and the last 10 ids stored per row.
 *
 * @param {object} db Open DB handle (caller owns its lifecycle).
 * @param {number} savedId Rowid of the observation just inserted.
 * @param {object} obs In-memory observation (only its title is used here).
 * @param {object} episode Source episode (only its project is used here).
 */
function linkRelatedObservations(db, savedId, obs, episode) {
  const newObs = db.prepare(`
    SELECT id, title, files_modified, related_ids FROM observations WHERE id = ?
  `).get(savedId);
  if (!newObs) return;

  const candidates = new Set();

  // Strategy 1: FTS5 title similarity (cross-session)
  if (obs.title) {
    // Keep up to 5 meaningful tokens; strip punctuation FTS5 would choke on.
    const titleTokens = obs.title.replace(/[^a-zA-Z0-9_\s-]/g, ' ').split(/\s+/)
      .filter(t => t.length > 2).slice(0, 5);
    if (titleTokens.length > 0) {
      // Quote each token (doubling embedded quotes) so FTS matches it literally.
      const ftsQuery = titleTokens.map(t => `"${t.replace(/"/g, '""')}"`).join(' OR ');
      try {
        // bm25 weights rank text/title columns highest; best 5 matches win.
        const ftsMatches = db.prepare(`
          SELECT o.id FROM observations_fts
          JOIN observations o ON observations_fts.rowid = o.id
          WHERE observations_fts MATCH ? AND o.id != ? AND o.project = ?
          ORDER BY bm25(observations_fts, 10, 5, 5, 3, 3, 2)
          LIMIT 5
        `).all(ftsQuery, newObs.id, episode.project);
        for (const m of ftsMatches) candidates.add(m.id);
      } catch (e) { debugCatch(e, 'linkRelated-fts'); }
    }
  }

  // Strategy 2: file overlap (any session, recent observations)
  let newFiles;
  try { newFiles = JSON.parse(newObs.files_modified || '[]'); } catch (e) { debugCatch(e, 'linkRelated-newFiles'); newFiles = []; }
  // Defensive: stored JSON must be a flat string array or it is discarded.
  if (!Array.isArray(newFiles) || !newFiles.every(f => typeof f === 'string')) newFiles = [];
  if (newFiles.length > 0) {
    const recentObs = db.prepare(`
      SELECT id, files_modified FROM observations
      WHERE id != ? AND created_at_epoch > ? AND project = ?
      ORDER BY created_at_epoch DESC LIMIT 50
    `).all(newObs.id, Date.now() - RELATED_OBS_WINDOW_MS, episode.project);
    for (const r of recentObs) {
      let rFiles;
      try { rFiles = JSON.parse(r.files_modified || '[]'); } catch (e) { debugCatch(e, 'linkRelated-rFiles'); rFiles = []; }
      if (!Array.isArray(rFiles) || !rFiles.every(f => typeof f === 'string')) rFiles = [];
      // Any shared modified file makes the pair candidates for linking.
      if (rFiles.some(f => newFiles.includes(f))) candidates.add(r.id);
    }
  }

  // Apply bidirectional links (max 5 related)
  if (candidates.size > 0) {
    let newRelated;
    try { newRelated = JSON.parse(newObs.related_ids || '[]'); } catch (e) { debugCatch(e, 'linkRelated-newRelated'); newRelated = []; }
    if (!Array.isArray(newRelated) || !newRelated.every(id => Number.isInteger(id))) newRelated = [];

    for (const relId of [...candidates].slice(0, 5)) {
      if (newRelated.includes(relId)) continue;
      newRelated.push(relId);

      // Back-link from the related row to the new one (kept to last 10 ids).
      const rel = db.prepare('SELECT related_ids FROM observations WHERE id = ?').get(relId);
      if (rel) {
        let relRelated;
        try { relRelated = JSON.parse(rel.related_ids || '[]'); } catch (e) { debugCatch(e, 'linkRelated-relRelated'); relRelated = []; }
        if (!Array.isArray(relRelated) || !relRelated.every(id => Number.isInteger(id))) relRelated = [];
        if (!relRelated.includes(newObs.id)) {
          relRelated.push(newObs.id);
          db.prepare('UPDATE observations SET related_ids = ? WHERE id = ?').run(JSON.stringify(relRelated.slice(-10)), relId);
        }
      }
    }

    db.prepare('UPDATE observations SET related_ids = ? WHERE id = ?').run(JSON.stringify(newRelated.slice(-10)), newObs.id);
  }
}
158
+
159
+ // ─── Background: LLM Episode Extraction (Tier 2 F) ──────────────────────────
160
+
161
/**
 * Background worker: turn a buffered episode file (path in process.argv[3])
 * into one structured observation via the LLM, falling back to a rule-based
 * observation when no LLM slot is available or parsing fails, then persist
 * and link it. Always deletes the temp episode file before returning.
 */
export async function handleLLMEpisode() {
  const tmpFile = process.argv[3];
  if (!tmpFile) return;

  let episode;
  try {
    episode = JSON.parse(readFileSync(tmpFile, 'utf8'));
  } catch {
    try { unlinkSync(tmpFile); } catch {}
    return;
  }

  if (!episode.entries || episode.entries.length === 0) {
    try { unlinkSync(tmpFile); } catch {}
    return;
  }

  // FIX: only `entries` was validated above — a missing/malformed
  // `episode.files` crashed on `.map`/`.join` below. Normalize both
  // file lists once, up front.
  const files = Array.isArray(episode.files) ? episode.files : [];
  const filesRead = Array.isArray(episode.filesRead) ? episode.filesRead : [];

  // Rate-limit background LLM calls to avoid competing with active sessions
  if (!process.env.CLAUDE_MEM_NO_DELAY) {
    const sessionActive = existsSync(sessionFile());
    const delayMs = sessionActive
      ? 2000 + Math.random() * 3000
      : 500 + Math.random() * 1000;
    debugLog('DEBUG', 'llm-episode', `delay: ${Math.round(delayMs)}ms (session ${sessionActive ? 'active' : 'ended'})`);
    await sleep(delayMs);
  }

  const fileList = files.map(f => basename(f)).join(', ') || '(multiple)';

  let prompt;
  if (episode.entries.length === 1) {
    const e = episode.entries[0];
    prompt = `Extract a structured observation from this code change. Return ONLY valid JSON, no markdown fences.

Tool: ${e.tool}
File: ${files.join(', ') || 'unknown'}
Action: ${e.desc}
Error: ${e.isError ? 'yes' : 'no'}

JSON: {"type":"decision|bugfix|feature|refactor|discovery|change","title":"concise ≤80 char description","narrative":"what changed, why, and outcome (2-3 sentences)","concepts":["kw1","kw2"],"facts":["fact1","fact2"],"importance":1}
Facts: each MUST be (1) atomic—one claim, (2) self-contained—no pronouns, include file/function name, (3) specific—"refreshToken() in auth.ts:45 uses 1h TTL" not "handles tokens"
importance: 1=routine, 2=notable (error fix, arch decision, config change), 3=critical (breaking change, security fix, data migration)`;
  } else {
    const actionList = episode.entries.map((e, i) =>
      `${i + 1}. [${e.tool}] ${e.desc}${e.isError ? ' (ERROR)' : ''}`
    ).join('\n');

    prompt = `Summarize this coding episode as ONE coherent observation. Return ONLY valid JSON, no markdown fences.

Project: ${episode.project}
Files: ${fileList}
Actions (${episode.entries.length} total):
${actionList}

JSON: {"type":"decision|bugfix|feature|refactor|discovery|change","title":"coherent ≤80 char summary","narrative":"what was done, why, and outcome (3-5 sentences)","concepts":["keyword1","keyword2"],"facts":["specific fact 1","specific fact 2"],"importance":1}
Facts: each MUST be (1) atomic—one claim, (2) self-contained—no pronouns, include file/function name, (3) specific—"refreshToken() in auth.ts:45 uses 1h TTL" not "handles tokens"
importance: 1=routine, 2=notable (error fix, arch decision, config change), 3=critical (breaking change, security fix, data migration)`;
  }

  // Heuristic importance floor — the LLM can raise it but never lower it.
  const ruleImportance = computeRuleImportance(episode);

  let obs;
  const validTypes = new Set(['decision', 'bugfix', 'feature', 'refactor', 'discovery', 'change']);

  const gotSlot = await acquireLLMSlot();
  if (gotSlot) {
    let raw, parsed;
    try {
      raw = callLLM(prompt);
      parsed = parseJsonFromLLM(raw);
    } finally {
      // Always free the semaphore, even if the CLI call threw.
      releaseLLMSlot();
    }

    if (parsed && parsed.title) {
      obs = {
        type: validTypes.has(parsed.type) ? parsed.type : 'change',
        title: truncate(parsed.title, 120),
        subtitle: fileList,
        narrative: truncate(parsed.narrative || '', 500),
        concepts: Array.isArray(parsed.concepts) ? parsed.concepts.slice(0, 10) : [],
        facts: Array.isArray(parsed.facts) ? parsed.facts.slice(0, 10) : [],
        files,
        filesRead,
        importance: Math.max(ruleImportance, clampImportance(parsed.importance)),
      };
    }
  }

  // Degraded path: no LLM slot, or the LLM returned unusable output.
  if (!obs) {
    if (!gotSlot) debugLog('WARN', 'llm-episode', 'semaphore timeout, using degraded storage');
    const hasError = episode.entries.some(e => e.isError);
    const hasEdit = episode.entries.some(e => ['Edit', 'Write', 'NotebookEdit'].includes(e.tool));
    const inferredType = hasError ? 'bugfix' : hasEdit ? 'change' : 'discovery';
    const firstDesc = episode.entries[0]?.desc || '(no description)';
    obs = {
      type: inferredType,
      title: truncate(firstDesc, 120),
      subtitle: fileList,
      narrative: episode.entries.map(e => e.desc).join('; '),
      concepts: [],
      facts: [],
      files,
      filesRead,
      importance: ruleImportance,
    };
  }

  const db = openDb();
  if (!db) { try { unlinkSync(tmpFile); } catch {} return; }

  try {
    const savedId = saveObservation(obs, episode.project, episode.sessionId, db);

    // Linking is best-effort; a failure must not lose the saved observation.
    if (savedId) {
      try {
        linkRelatedObservations(db, savedId, obs, episode);
      } catch (e) { debugCatch(e, 'relatedObsLinking'); }
    }
  } finally {
    db.close();
  }

  try { unlinkSync(tmpFile); } catch {}
}
286
+
287
+ // ─── Background: LLM Session Summary ────────────────────────────────────────
288
+
289
/**
 * Background worker: generate a structured LLM session summary.
 * Waits briefly for in-flight `ep-flush-*` episode files, gathers the
 * session's recent observations, asks the LLM for a recap, and stores it
 * in session_summaries. Session id/project come from argv[3]/argv[4],
 * falling back to the current session and inferred project.
 */
export async function handleLLMSummary() {
  // Seconds to wait for pending episode flush files
  // (override via CLAUDE_MEM_FLUSH_TIMEOUT; default 15).
  const parsed = parseInt(process.env.CLAUDE_MEM_FLUSH_TIMEOUT, 10);
  const flushTimeout = Number.isNaN(parsed) ? 15 : parsed;
  for (let i = 0; i < flushTimeout; i++) {
    try {
      const files = readdirSync(RUNTIME_DIR).filter(f => f.startsWith('ep-flush-'));
      if (files.length === 0) break;
    } catch { break; }
    // FIX: log the configured timeout instead of a hardcoded "15".
    debugLog('DEBUG', 'llm-summary', `waiting for flush files (${i + 1}/${flushTimeout})`);
    await sleep(1000);
  }

  const db = openDb();
  if (!db) return;

  try {
    const sessionId = process.argv[3] || getSessionId();
    const project = process.argv[4] || inferProject();

    // Most recent observations for this session, newest first, capped at 30.
    const recentObs = db.prepare(`
      SELECT id, type, title, narrative
      FROM observations
      WHERE memory_session_id = ?
      ORDER BY created_at_epoch DESC
      LIMIT 30
    `).all(sessionId);

    // Nothing recorded — nothing to summarize.
    if (recentObs.length < 1) return;

    const obsList = recentObs.map((o, i) =>
      `${i + 1}. [${o.type}] ${o.title}${o.narrative ? ': ' + truncate(o.narrative, 80) : ''}`
    ).join('\n');

    const prompt = `Summarize this coding session. Return ONLY valid JSON, no markdown fences.

Project: ${project}
Observations (${recentObs.length} total):
${obsList}

JSON: {"request":"what the user was working on","investigated":"what was explored/analyzed","learned":"key findings","completed":"what was accomplished","next_steps":"suggested follow-up"}`;

    // Serialize LLM usage across processes; skip the summary on timeout.
    if (!(await acquireLLMSlot())) {
      debugLog('WARN', 'llm-summary', 'semaphore timeout, skipping summary');
      return;
    }

    let raw, llmParsed;
    try {
      raw = callLLM(prompt, 20000);
      llmParsed = parseJsonFromLLM(raw);
    } finally {
      releaseLLMSlot();
    }

    // Require at least a `request` field before persisting the summary.
    if (llmParsed && llmParsed.request) {
      const now = new Date();
      db.prepare(`
        INSERT INTO session_summaries (memory_session_id, project, request, investigated, learned, completed, next_steps, files_read, files_edited, notes, created_at, created_at_epoch)
        VALUES (?, ?, ?, ?, ?, ?, ?, '[]', '[]', '', ?, ?)
      `).run(
        sessionId, project,
        llmParsed.request || '', llmParsed.investigated || '', llmParsed.learned || '',
        llmParsed.completed || '', llmParsed.next_steps || '',
        now.toISOString(), now.getTime()
      );
    }
  } finally {
    db.close();
  }
}
@@ -0,0 +1,84 @@
1
+ // claude-mem-lite LLM concurrency semaphore
2
+ // Limits concurrent claude -p calls to prevent resource contention
3
+
4
+ import { join } from 'path';
5
+ import { readFileSync, writeFileSync, unlinkSync, readdirSync, openSync, closeSync, writeSync, constants as fsConstants } from 'fs';
6
+ import { RUNTIME_DIR } from './hook-shared.mjs';
7
+
8
// Maximum number of concurrent `claude -p` invocations across all processes.
export const LLM_SEM_MAX = 2;
// Longest a worker waits for a semaphore slot before giving up.
export const LLM_SEM_TIMEOUT = 30000; // 30s max wait

// Promise-based delay used by the acquisition retry loop.
export const sleepMs = (ms) => new Promise((resolve) => { setTimeout(resolve, ms); });
12
+
13
/**
 * Acquire a file-based semaphore slot for LLM calls.
 * Uses acquire-then-verify: atomically creates a slot file, then checks total count.
 * Retries with jittered backoff until LLM_SEM_TIMEOUT elapses.
 * @returns {Promise<boolean>} true if slot acquired, false on timeout
 */
export async function acquireLLMSlot() {
  const deadline = Date.now() + LLM_SEM_TIMEOUT;
  // One slot file per process, keyed by PID.
  const slotFile = join(RUNTIME_DIR, `llm-sem-${process.pid}`);

  while (Date.now() < deadline) {
    // Acquire-then-verify: atomically create our slot first, then check total count
    let created = false;
    try {
      let fd;
      try {
        // O_CREAT|O_EXCL makes creation atomic: open fails if the file exists.
        fd = openSync(slotFile, fsConstants.O_CREAT | fsConstants.O_EXCL | fsConstants.O_WRONLY);
        const payload = JSON.stringify({ pid: process.pid, ts: Date.now() });
        writeSync(fd, payload);
        created = true;
      } finally {
        if (fd !== undefined) closeSync(fd);
      }
    } catch {
      // Slot file already exists for this PID — update timestamp
      try { writeFileSync(slotFile, JSON.stringify({ pid: process.pid, ts: Date.now() })); created = true; } catch {}
    }

    // Could not create or refresh our slot file — back off and retry.
    if (!created) { await sleepMs(200 + Math.random() * 800); continue; }

    // Count all active semaphore files (including ours) and clean stale ones
    let active = 0;
    try {
      for (const f of readdirSync(RUNTIME_DIR)) {
        if (!f.startsWith('llm-sem-')) continue;
        const fp = join(RUNTIME_DIR, f);
        try {
          const raw = readFileSync(fp, 'utf8');
          const info = JSON.parse(raw);
          const age = Date.now() - (info.ts || 0);
          // Reap slots older than 60s regardless of owner liveness.
          if (age > 60000) {
            try { unlinkSync(fp); } catch {}
            continue;
          }
          if (info.pid) {
            // Signal 0 probes liveness without delivering a signal.
            try { process.kill(info.pid, 0); active++; } catch (killErr) {
              if (killErr.code === 'ESRCH') { try { unlinkSync(fp); } catch {} }
              else { active++; } // EPERM = process exists but different user
            }
          } else {
            // No recorded owner — count it to stay conservative.
            active++;
          }
        } catch {
          // Unreadable/corrupt slot file — count it to stay conservative.
          active++;
        }
      }
    } catch {}

    if (active <= LLM_SEM_MAX) return true; // Slot acquired

    // Too many concurrent — release our slot and back off
    try { unlinkSync(slotFile); } catch {}
    await sleepMs(200 + Math.random() * 800);
  }
  return false; // Timed out
}
78
+
79
/**
 * Release this process's file-based LLM semaphore slot.
 * Best-effort: a missing slot file (never acquired, or reaped) is ignored.
 */
export function releaseLLMSlot() {
  const slotFile = join(RUNTIME_DIR, `llm-sem-${process.pid}`);
  try {
    unlinkSync(slotFile);
  } catch {
    // Already released or never acquired — nothing to do.
  }
}
@@ -0,0 +1,174 @@
1
+ // claude-mem-lite: Shared infrastructure for hook.mjs and hook-llm.mjs
2
+ // Constants, session management, DB access, LLM calls, process utilities
3
+
4
+ import { execFileSync, spawn } from 'child_process';
5
+ import { randomUUID } from 'crypto';
6
+ import { join } from 'path';
7
+ import { existsSync, readFileSync, writeFileSync, appendFileSync, mkdirSync, renameSync, unlinkSync } from 'fs';
8
+ import { inferProject, debugCatch } from './utils.mjs';
9
+ import { ensureDb, DB_DIR } from './schema.mjs';
10
+ import { ensureRegistryDb } from './registry.mjs';
11
+ import { getClaudePath as getClaudePathShared, resolveModel as resolveModelShared } from './haiku-client.mjs';
12
+
13
+ // ─── Constants ────────────────────────────────────────────────────────────────
14
+
15
// Per-install runtime scratch directory (session markers, flush files, semaphores).
export const RUNTIME_DIR = join(DB_DIR, 'runtime');
// Absolute path of this hook script; used to re-spawn it as a background worker.
export const SCRIPT_PATH = process.argv[1];

// Timing constants
export const EPISODE_BUFFER_SIZE = 10; // tool events buffered before an episode flush
export const EPISODE_TIME_GAP_MS = 5 * 60 * 1000; // 5 min
export const SESSION_EXPIRY_MS = 12 * 60 * 60 * 1000; // 12h
export const STALE_SESSION_MS = 24 * 60 * 60 * 1000; // 24h
export const STALE_LOCK_MS = 30000; // 30s
export const DEDUP_WINDOW_MS = 5 * 60 * 1000; // 5 min (title dedup)
export const RELATED_OBS_WINDOW_MS = 7 * 86400000; // 7 days
export const FALLBACK_OBS_WINDOW_MS = 7 * 24 * 60 * 60 * 1000; // 7 days
export const RESOURCE_RESCAN_INTERVAL_MS = 60 * 60 * 1000; // 1 hour

// Ensure runtime directory exists (best-effort; failures surface on first use)
try { if (!existsSync(RUNTIME_DIR)) mkdirSync(RUNTIME_DIR, { recursive: true }); } catch {}
31
+
32
+ // ─── Session ID Management ───────────────────────────────────────────────────
33
+
34
/** Path of the per-project session marker file under the runtime dir. */
export function sessionFile() {
  const project = inferProject();
  return join(RUNTIME_DIR, `session-${project}`);
}
37
+
38
/**
 * Return the current session id, minting a fresh one when the marker file
 * is missing, unreadable, or older than SESSION_EXPIRY_MS.
 */
export function getSessionId() {
  let session = null;
  try {
    session = JSON.parse(readFileSync(sessionFile(), 'utf8'));
  } catch {
    // Missing or corrupt marker file — fall through to a fresh id.
  }
  if (session && Date.now() - session.startedAt < SESSION_EXPIRY_MS) {
    return session.id;
  }
  return createSessionId();
}
45
+
46
/**
 * Mint a new session id and atomically persist the session marker file.
 * @returns {string} The new id, of the form `hook-<project>-<8 hex chars>`.
 */
export function createSessionId() {
  const project = inferProject();
  const id = `hook-${project}-${randomUUID().slice(0, 8)}`;
  const target = sessionFile();
  // Write-then-rename so concurrent readers never observe a partial file.
  const scratch = target + `.tmp-${process.pid}`;
  writeFileSync(scratch, JSON.stringify({ id, startedAt: Date.now(), project }));
  renameSync(scratch, target);
  return id;
}
55
+
56
+ // ─── Database ────────────────────────────────────────────────────────────────
57
+
58
/** Open the main observations DB; returns null when it is unavailable. */
export function openDb() {
  let handle = null;
  try {
    handle = ensureDb();
  } catch {
    // DB missing/locked — callers treat null as "skip persistence".
  }
  return handle;
}
65
+
66
+ // ─── Registry Database (dispatch system) ─────────────────────────────────────
67
+
68
// Lazily-opened, module-cached handle to the resource-registry database.
const REGISTRY_DB_PATH = join(DB_DIR, 'resource-registry.db');
let _registryDb = null;

/** Return the cached registry DB handle, opening it on first use (null on failure). */
export function getRegistryDb() {
  if (!_registryDb) {
    try {
      _registryDb = ensureRegistryDb(REGISTRY_DB_PATH);
    } catch (e) {
      debugCatch(e, 'getRegistryDb');
    }
  }
  return _registryDb;
}

/** Close and forget the cached registry DB handle (safe when never opened). */
export function closeRegistryDb() {
  if (_registryDb) {
    try {
      _registryDb.close();
    } catch {
      // Ignore close errors — the handle is discarded either way.
    }
  }
  _registryDb = null;
}
81
+
82
+ // ─── LLM via claude CLI ─────────────────────────────────────────────────────
83
+
84
/**
 * Invoke the claude CLI synchronously with the given prompt on stdin.
 * @param {string} prompt Text piped to `claude -p`.
 * @param {number} [timeoutMs=15000] Kill the CLI after this many ms.
 * @returns {string|null} Trimmed CLI output, or null when the call failed
 *   and no complete JSON response could be salvaged from the error.
 */
export function callLLM(prompt, timeoutMs = 15000) {
  const { cli: modelName } = resolveModelShared();
  const options = {
    input: prompt,
    timeout: timeoutMs,
    encoding: 'utf8',
    env: { ...process.env, CLAUDE_MEM_HOOK_RUNNING: '1' },
    stdio: ['pipe', 'pipe', 'pipe'],
  };
  try {
    const output = execFileSync(getClaudePathShared(), ['-p', '--model', modelName], options);
    return output.trim();
  } catch (e) {
    // Timeouts sometimes leave a complete JSON response on stdout — salvage it.
    const salvaged = _extractResponseFromError(e);
    return salvaged || null;
  }
}
101
+
102
+ // ─── Background Spawner ─────────────────────────────────────────────────────
103
+
104
/**
 * Re-invoke this script as a detached, fire-and-forget background worker.
 * Errors are logged, never thrown — the foreground hook must not fail.
 * @param {string} bgEvent Event name passed as argv[2] to the child.
 * @param {...string} extraArgs Additional argv entries for the child.
 */
export function spawnBackground(bgEvent, ...extraArgs) {
  try {
    const child = spawn(process.execPath, [SCRIPT_PATH, bgEvent, ...extraArgs], {
      detached: true,
      stdio: 'ignore',
      env: { ...process.env, CLAUDE_MEM_HOOK_RUNNING: '1' },
    });
    // Swallow async spawn failures and exits; the child owns its own fate.
    child.on('error', (err) => { debugCatch(err, 'spawnBackground'); });
    child.on('exit', () => {});
    child.unref();
  } catch (err) {
    debugCatch(err, 'spawnBackground');
  }
}
119
+
120
+ // ─── Utilities ──────────────────────────────────────────────────────────────
121
+
122
/** Await-able delay of `ms` milliseconds. */
export function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
123
+
124
+ // ─── Tool Event Tracking (for dispatch feedback) ────────────────────────────
125
+ // PostToolUse appends feedback-relevant tool events (Skill, Task, Edit, Write, Bash errors).
126
+ // Stop handler reads them and passes to collectFeedback for adoption/outcome detection.
127
+
128
/** Path of the per-project JSONL file buffering feedback-relevant tool events. */
export function toolEventsFile() {
  const project = inferProject();
  return join(RUNTIME_DIR, `tool-events-${project}.jsonl`);
}
131
+
132
/**
 * Append one tool event to the per-project JSONL buffer for feedback tracking.
 * Only call for feedback-relevant events (Skill, Task, Edit, Write, Bash).
 * Best-effort: serialization or write failures are silently ignored.
 * @param {object} event { tool_name, tool_input, tool_response }
 */
export function appendToolEvent(event) {
  try {
    const line = `${JSON.stringify(event)}\n`;
    appendFileSync(toolEventsFile(), line);
  } catch {
    // Tracking is advisory — never let it break the hook.
  }
}
142
+
143
/**
 * Read all tracked tool events and remove the file.
 * The rename→read→delete sequence ensures only one concurrent reader
 * claims a given batch of events.
 * @returns {object[]} Parsed tool event objects (empty when none tracked).
 */
export function readAndClearToolEvents() {
  const source = toolEventsFile();
  const claimed = source + `.claim-${process.pid}-${Date.now()}`;
  let raw;
  try {
    renameSync(source, claimed);
    raw = readFileSync(claimed, 'utf8');
  } catch {
    // No events file, or another process claimed it first.
    return [];
  }
  try { unlinkSync(claimed); } catch {}

  const events = [];
  for (const line of raw.trim().split('\n')) {
    if (!line) continue;
    try {
      const parsed = JSON.parse(line);
      if (parsed) events.push(parsed);
    } catch {
      // Skip truncated or corrupt lines.
    }
  }
  return events;
}
162
+
163
/**
 * Extract a partial response from CLI error output (timeout/error recovery).
 * Only a string that looks like — and parses as — a complete JSON object
 * is returned; anything else yields null.
 * @param {Error} error The caught error from execFileSync
 * @returns {string|null} Extracted JSON string or null
 */
export function _extractResponseFromError(error) {
  const fromStdout = error.stdout?.toString?.()?.trim();
  const fromOutput = error.output?.[1]?.toString?.()?.trim();
  const text = fromStdout || fromOutput || '';
  if (!text.startsWith('{') || !text.endsWith('}')) return null;
  try {
    JSON.parse(text); // Validate only — callers want the raw string.
    return text;
  } catch {
    return null;
  }
}