claude-mem-lite 2.1.6 → 2.2.0

This diff shows the changes between publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-mem-lite",
3
- "version": "2.1.6",
3
+ "version": "2.2.0",
4
4
  "description": "Lightweight persistent memory system for Claude Code — FTS5 search, episode batching, error-triggered recall",
5
5
  "author": {
6
6
  "name": "sdsrss"
@@ -24,7 +24,7 @@ const ALLOWED_BASES = [
24
24
 
25
25
  function isAllowedPath(filePath) {
26
26
  if (!filePath) return false;
27
- return ALLOWED_BASES.some(base => filePath.startsWith(base));
27
+ return ALLOWED_BASES.some(base => filePath === base || filePath.startsWith(base + '/'));
28
28
  }
29
29
 
30
30
  // ─── Template Detection ──────────────────────────────────────────────────────
package/dispatch.mjs CHANGED
@@ -494,7 +494,7 @@ function inferTechFromPrompt(prompt) {
494
494
  [/\b(typescript|ts)\b/i, 'typescript'],
495
495
  [/\b(python|django|flask|fastapi)\b/i, 'python'],
496
496
  [/\b(rust|cargo)\b/i, 'rust'],
497
- [/\b(golang|go\s+\w+)\b/i, 'go'],
497
+ [/\b(golang|go\s+(?:build|test|run|get|mod|install|fmt|vet|generate|clean|work|tool))\b/i, 'go'],
498
498
  [/\b(java|spring|maven|gradle)\b/i, 'java'],
499
499
  [/\b(ruby|rails)\b/i, 'ruby'],
500
500
  [/\b(php|laravel|symfony)\b/i, 'php'],
@@ -645,7 +645,7 @@ export function isRecentlyRecommended(db, resourceId, sessionId) {
645
645
 
646
646
  // Already recommended in this session (session dedup)
647
647
  const sessionHit = db.prepare(
648
- 'SELECT 1 FROM invocations WHERE resource_id = ? AND session_id = ? LIMIT 1'
648
+ 'SELECT 1 FROM invocations WHERE resource_id = ? AND session_id = ? AND recommended = 1 LIMIT 1'
649
649
  ).get(resourceId, sessionId);
650
650
  if (sessionHit) return true;
651
651
  }
@@ -705,9 +705,9 @@ function applyAdoptionDecay(results) {
705
705
 
706
706
  if (multiplier === 0) return null;
707
707
  if (multiplier < 1) {
708
- // BM25 scores are negative (more negative = more relevant).
709
- // To penalize: divide by multiplier to make less negative (worse rank).
710
- return { ...r, relevance: r.relevance / multiplier, _decayed: true };
708
+ // Composite scores are negative (more negative = more relevant).
709
+ // To penalize: multiply by multiplier (<1) to make less negative (worse rank).
710
+ return { ...r, composite_score: (r.composite_score ?? r.relevance) * multiplier, _decayed: true };
711
711
  }
712
712
  return r;
713
713
  }).filter(Boolean);
@@ -796,7 +796,7 @@ export async function dispatchOnSessionStart(db, userPrompt, sessionId) {
796
796
  if (haikuResult?.query) {
797
797
  const haikuQuery = buildQueryFromText(haikuResult.query);
798
798
  if (haikuQuery) {
799
- const haikuResults = retrieveResources(db, haikuQuery, {
799
+ let haikuResults = retrieveResources(db, haikuQuery, {
800
800
  type: haikuResult.type === 'either' ? undefined : haikuResult.type,
801
801
  limit: 3,
802
802
  projectDomains,
package/hook-context.mjs CHANGED
@@ -154,7 +154,7 @@ export function updateClaudeMd(contextBlock) {
154
154
  const startIdx = content.indexOf(startTag);
155
155
  const endIdx = content.indexOf(endTag);
156
156
 
157
- if (startIdx !== -1 && endIdx !== -1) {
157
+ if (startIdx !== -1 && endIdx !== -1 && startIdx < endIdx) {
158
158
  // Replace existing section in-place — preserves surrounding content (including hint if present)
159
159
  content = content.slice(0, startIdx) + newSection + content.slice(endIdx + endTag.length);
160
160
  } else if (content.length > 0) {
package/hook-episode.mjs CHANGED
@@ -210,13 +210,21 @@ export function mergePendingEntries(episode) {
210
210
 
211
211
  /**
212
212
  * Check if an episode has significant content worth processing with LLM.
213
- * Significant = contains file edits or Bash errors.
213
+ * Significant = contains file edits, Bash errors, or a review/research pattern
214
+ * (5+ Read/Grep entries indicate investigation worth recording).
214
215
  * @param {object} episode The episode to check
215
216
  * @returns {boolean} true if the episode has significant content
216
217
  */
217
218
  export function episodeHasSignificantContent(episode) {
218
- return episode.entries.some(e =>
219
+ const hasEditsOrErrors = episode.entries.some(e =>
219
220
  EDIT_TOOLS.has(e.tool) ||
220
221
  (e.tool === 'Bash' && e.isError)
221
222
  );
223
+ if (hasEditsOrErrors) return true;
224
+
225
+ // Review/research pattern: reading many files indicates investigation
226
+ const readCount = episode.entries.filter(e =>
227
+ e.tool === 'Read' || e.tool === 'Grep'
228
+ ).length;
229
+ return readCount >= 5;
222
230
  }
package/hook-llm.mjs CHANGED
@@ -183,6 +183,47 @@ export function buildDegradedTitle(episode) {
183
183
  return desc.replace(/ → (?:ERROR: )?\{.*$/, hasError ? ' (error)' : '');
184
184
  }
185
185
 
186
+ /**
187
+ * Build a rule-based observation from episode metadata for immediate DB persistence.
188
+ * Used as pre-save (before LLM) and as fallback when LLM is unavailable.
189
+ * @param {object} episode Episode with entries, files, filesRead arrays
190
+ * @returns {object} Observation object ready for saveObservation()
191
+ */
192
+ export function buildImmediateObservation(episode) {
193
+ const hasError = episode.entries.some(e => e.isError);
194
+ const hasEdit = episode.entries.some(e => EDIT_TOOLS.has(e.tool));
195
+ const readCount = episode.entries.filter(e => e.tool === 'Read' || e.tool === 'Grep').length;
196
+ const isReviewPattern = !hasEdit && !hasError && readCount >= 5;
197
+ const inferredType = hasError ? 'bugfix' : hasEdit ? 'change' : 'discovery';
198
+ const fileList = (episode.files || []).map(f => basename(f)).join(', ') || '(multiple)';
199
+
200
+ // Review/research episodes: use a descriptive title with file count
201
+ let title;
202
+ if (isReviewPattern) {
203
+ const allFiles = [...new Set([
204
+ ...(episode.files || []),
205
+ ...(episode.filesRead || []),
206
+ ])].map(f => basename(f));
207
+ const names = allFiles.slice(0, 4).join(', ');
208
+ const suffix = allFiles.length > 4 ? ` +${allFiles.length - 4} more` : '';
209
+ title = truncate(`Reviewed ${allFiles.length} files: ${names}${suffix}`, 120);
210
+ } else {
211
+ title = truncate(buildDegradedTitle(episode), 120);
212
+ }
213
+
214
+ return {
215
+ type: inferredType,
216
+ title,
217
+ subtitle: fileList,
218
+ narrative: episode.entries.map(e => e.desc).join('; '),
219
+ concepts: [],
220
+ facts: [],
221
+ files: episode.files,
222
+ filesRead: episode.filesRead || [],
223
+ importance: isReviewPattern ? Math.max(2, computeRuleImportance(episode)) : computeRuleImportance(episode),
224
+ };
225
+ }
226
+
186
227
  // ─── Background: LLM Episode Extraction (Tier 2 F) ──────────────────────────
187
228
 
188
229
  export async function handleLLMEpisode() {
@@ -282,20 +323,7 @@ importance: 1=routine, 2=notable (error fix, arch decision, config change), 3=cr
282
323
  try { unlinkSync(tmpFile); } catch {}
283
324
  return;
284
325
  }
285
- const hasError = episode.entries.some(e => e.isError);
286
- const hasEdit = episode.entries.some(e => EDIT_TOOLS.has(e.tool));
287
- const inferredType = hasError ? 'bugfix' : hasEdit ? 'change' : 'discovery';
288
- obs = {
289
- type: inferredType,
290
- title: truncate(buildDegradedTitle(episode), 120),
291
- subtitle: fileList,
292
- narrative: episode.entries.map(e => e.desc).join('; '),
293
- concepts: [],
294
- facts: [],
295
- files: episode.files,
296
- filesRead: episode.filesRead || [],
297
- importance: ruleImportance,
298
- };
326
+ obs = buildImmediateObservation(episode);
299
327
  }
300
328
 
301
329
  const db = openDb();
@@ -371,16 +399,25 @@ export async function handleLLMSummary() {
371
399
  if (recentObs.length < 1) return;
372
400
 
373
401
  const obsList = recentObs.map((o, i) =>
374
- `${i + 1}. [${o.type}] ${o.title}${o.narrative ? ': ' + truncate(o.narrative, 80) : ''}`
402
+ `${i + 1}. [${o.type}] ${o.title}${o.narrative ? ': ' + truncate(o.narrative, 200) : ''}`
375
403
  ).join('\n');
376
404
 
405
+ // Include user prompts for richer context
406
+ const userPrompts = db.prepare(`
407
+ SELECT prompt_text FROM user_prompts
408
+ WHERE content_session_id = ? ORDER BY prompt_number ASC LIMIT 10
409
+ `).all(sessionId).map(p => truncate(p.prompt_text, 300));
410
+ const promptCtx = userPrompts.length > 0
411
+ ? `\nUser requests: ${userPrompts.join(' → ')}\n`
412
+ : '';
413
+
377
414
  const prompt = `Summarize this coding session. Return ONLY valid JSON, no markdown fences.
378
415
 
379
- Project: ${project}
416
+ Project: ${project}${promptCtx}
380
417
  Observations (${recentObs.length} total):
381
418
  ${obsList}
382
419
 
383
- JSON: {"request":"what the user was working on","investigated":"what was explored/analyzed","learned":"key findings","completed":"what was accomplished","next_steps":"suggested follow-up"}`;
420
+ JSON: {"request":"what the user was working on","completed":"specific items accomplished with file names","remaining_items":"specific unfinished items from the original request — compare investigation scope with actual changes to infer what was NOT yet done; be precise with file:issue format, or empty string if all done","next_steps":"suggested follow-up"}`;
384
421
 
385
422
  if (!(await acquireLLMSlot())) {
386
423
  debugLog('WARN', 'llm-summary', 'semaphore timeout, skipping summary');
@@ -398,12 +435,13 @@ JSON: {"request":"what the user was working on","investigated":"what was explore
398
435
  if (llmParsed && llmParsed.request) {
399
436
  const now = new Date();
400
437
  db.prepare(`
401
- INSERT INTO session_summaries (memory_session_id, project, request, investigated, learned, completed, next_steps, files_read, files_edited, notes, created_at, created_at_epoch)
402
- VALUES (?, ?, ?, ?, ?, ?, ?, '[]', '[]', '', ?, ?)
438
+ INSERT INTO session_summaries (memory_session_id, project, request, investigated, learned, completed, next_steps, remaining_items, files_read, files_edited, notes, created_at, created_at_epoch)
439
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, '[]', '[]', '', ?, ?)
403
440
  `).run(
404
441
  sessionId, project,
405
442
  llmParsed.request || '', llmParsed.investigated || '', llmParsed.learned || '',
406
443
  llmParsed.completed || '', llmParsed.next_steps || '',
444
+ llmParsed.remaining_items || '',
407
445
  now.toISOString(), now.getTime()
408
446
  );
409
447
  }
package/hook.mjs CHANGED
@@ -10,7 +10,7 @@ import { readFileSync, writeFileSync, unlinkSync, readdirSync, renameSync, statS
10
10
  import {
11
11
  truncate, typeIcon, inferProject, detectBashSignificance,
12
12
  extractErrorKeywords, extractFilePaths, isRelatedToEpisode,
13
- makeEntryDesc, scrubSecrets, computeRuleImportance, EDIT_TOOLS, debugCatch, debugLog, fmtTime,
13
+ makeEntryDesc, scrubSecrets, EDIT_TOOLS, debugCatch, debugLog, fmtTime,
14
14
  } from './utils.mjs';
15
15
  import {
16
16
  readEpisodeRaw, episodeFile,
@@ -29,7 +29,7 @@ import {
29
29
  closeRegistryDb, spawnBackground, appendToolEvent, readAndClearToolEvents,
30
30
  resetInjectionBudget, hasInjectionBudget, incrementInjection,
31
31
  } from './hook-shared.mjs';
32
- import { handleLLMEpisode, handleLLMSummary, saveObservation, buildDegradedTitle } from './hook-llm.mjs';
32
+ import { handleLLMEpisode, handleLLMSummary, saveObservation, buildImmediateObservation } from './hook-llm.mjs';
33
33
  import { searchRelevantMemories } from './hook-memory.mjs';
34
34
  import { buildAndSaveHandoff, detectContinuationIntent, renderHandoffInjection } from './hook-handoff.mjs';
35
35
 
@@ -89,21 +89,7 @@ function flushEpisode(episode) {
89
89
  // LLM background worker will upgrade title/narrative/importance later.
90
90
  if (isSignificant) {
91
91
  try {
92
- const hasError = episode.entries.some(e => e.isError);
93
- const hasEdit = episode.entries.some(e => EDIT_TOOLS.has(e.tool));
94
- const inferredType = hasError ? 'bugfix' : hasEdit ? 'change' : 'discovery';
95
- const fileList = (episode.files || []).map(f => basename(f)).join(', ') || '(multiple)';
96
- const obs = {
97
- type: inferredType,
98
- title: truncate(buildDegradedTitle(episode), 120),
99
- subtitle: fileList,
100
- narrative: episode.entries.map(e => e.desc).join('; '),
101
- concepts: [],
102
- facts: [],
103
- files: episode.files,
104
- filesRead: episode.filesRead || [],
105
- importance: computeRuleImportance(episode),
106
- };
92
+ const obs = buildImmediateObservation(episode);
107
93
  const id = saveObservation(obs, episode.project, episode.sessionId);
108
94
  if (id) episode.savedId = id;
109
95
  } catch (e) { debugCatch(e, 'flushEpisode-immediateSave'); }
@@ -160,7 +146,7 @@ async function handlePostToolUse() {
160
146
 
161
147
  // Skip noise
162
148
  if (SKIP_TOOLS.has(tool_name)) return;
163
- if (tool_name.startsWith('mem_') || tool_name.startsWith('mcp__mem__')) return;
149
+ if (tool_name.startsWith('mem_') || tool_name.startsWith('mcp__mem__') || tool_name.startsWith('mcp__plugin_claude-mem-lite')) return;
164
150
  if (tool_name.startsWith('mcp__sequential') || tool_name.startsWith('mcp__plugin_context7')) return;
165
151
 
166
152
  const resp = typeof tool_response === 'string' ? tool_response : JSON.stringify(tool_response || '');
@@ -347,21 +333,7 @@ async function handleStop() {
347
333
  // Immediate save: persist rule-based observation to DB before spawning background worker.
348
334
  // Without this, data is lost if the background worker fails.
349
335
  try {
350
- const hasError = episode.entries.some(e => e.isError);
351
- const hasEdit = episode.entries.some(e => EDIT_TOOLS.has(e.tool));
352
- const inferredType = hasError ? 'bugfix' : hasEdit ? 'change' : 'discovery';
353
- const fileList = (episode.files || []).map(f => basename(f)).join(', ') || '(multiple)';
354
- const obs = {
355
- type: inferredType,
356
- title: truncate(buildDegradedTitle(episode), 120),
357
- subtitle: fileList,
358
- narrative: episode.entries.map(e => e.desc).join('; '),
359
- concepts: [],
360
- facts: [],
361
- files: episode.files,
362
- filesRead: episode.filesRead || [],
363
- importance: computeRuleImportance(episode),
364
- };
336
+ const obs = buildImmediateObservation(episode);
365
337
  const id = saveObservation(obs, episode.project, episode.sessionId);
366
338
  if (id) episode.savedId = id;
367
339
  } catch (e) { debugCatch(e, 'handleStop-fallback-immediateSave'); }
@@ -906,6 +878,7 @@ async function handleResourceScan() {
906
878
  }
907
879
 
908
880
  // Upsert changed resources with fallback metadata (no Haiku)
881
+ let firstErr = true;
909
882
  for (const res of toIndex) {
910
883
  try {
911
884
  upsertResource(rdb, {
@@ -920,7 +893,7 @@ async function handleResourceScan() {
920
893
  trigger_patterns: `when user needs ${res.name.replace(/-/g, ' ').replace(/\//g, ' ')}`,
921
894
  capability_summary: `${res.type}: ${res.name.replace(/-/g, ' ')}`,
922
895
  });
923
- } catch {}
896
+ } catch (e) { if (firstErr) { debugCatch(e, 'handleResourceScan-upsert'); firstErr = false; } }
924
897
  }
925
898
 
926
899
  // Disable resources no longer on filesystem
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "claude-mem-lite",
3
- "version": "2.1.6",
3
+ "version": "2.2.0",
4
4
  "description": "Lightweight persistent memory system for Claude Code",
5
5
  "type": "module",
6
6
  "engines": {
@@ -54,13 +54,13 @@ function fallbackExtract(resource) {
54
54
  infra: 'infrastructure,devops,cloud',
55
55
  };
56
56
 
57
- let intentTags = '';
57
+ const intentTagSet = new Set();
58
58
  for (const [key, tags] of Object.entries(intentMap)) {
59
59
  if (name.includes(key) || content.includes(key)) {
60
- intentTags = tags;
61
- break;
60
+ for (const t of tags.split(',')) intentTagSet.add(t);
62
61
  }
63
62
  }
63
+ const intentTags = [...intentTagSet].join(',');
64
64
 
65
65
  // Infer domain tags from content
66
66
  const domainPatterns = [
@@ -197,8 +197,9 @@ export function buildEnhancedQuery(signals) {
197
197
  // directly across name, intent_tags, capability_summary, trigger_patterns.
198
198
  if (signals.rawKeywords?.length > 0) {
199
199
  for (const kw of signals.rawKeywords) {
200
- parts.push(`intent_tags:${kw}`);
201
- parts.push(kw); // literal, no synonym expansion
200
+ const safeKw = expandToken(kw);
201
+ parts.push(`intent_tags:${safeKw}`);
202
+ parts.push(safeKw);
202
203
  }
203
204
  }
204
205
 
@@ -376,27 +377,31 @@ const COMPOSITE_EXPR = `(
376
377
  const COMPOSITE_ORDER = `ORDER BY ${COMPOSITE_EXPR} ASC`;
377
378
 
378
379
  const SEARCH_SQL = `
379
- SELECT r.*,
380
- bm25(resources_fts, 5.0, 3.0, 3.0, 2.0, 2.0, 1.0, 1.0, 1.0) AS relevance,
381
- ${COMPOSITE_EXPR} AS composite_score
382
- FROM resources_fts
383
- JOIN resources r ON r.id = resources_fts.rowid
384
- WHERE resources_fts MATCH ?
385
- AND r.status = 'active'
386
- ORDER BY ${COMPOSITE_EXPR} ASC
380
+ SELECT *, composite_score FROM (
381
+ SELECT r.*,
382
+ bm25(resources_fts, 5.0, 3.0, 3.0, 2.0, 2.0, 1.0, 1.0, 1.0) AS relevance,
383
+ ${COMPOSITE_EXPR} AS composite_score
384
+ FROM resources_fts
385
+ JOIN resources r ON r.id = resources_fts.rowid
386
+ WHERE resources_fts MATCH ?
387
+ AND r.status = 'active'
388
+ ) sub
389
+ ORDER BY composite_score ASC
387
390
  LIMIT ?
388
391
  `;
389
392
 
390
393
  const SEARCH_BY_TYPE_SQL = `
391
- SELECT r.*,
392
- bm25(resources_fts, 5.0, 3.0, 3.0, 2.0, 2.0, 1.0, 1.0, 1.0) AS relevance,
393
- ${COMPOSITE_EXPR} AS composite_score
394
- FROM resources_fts
395
- JOIN resources r ON r.id = resources_fts.rowid
396
- WHERE resources_fts MATCH ?
397
- AND r.status = 'active'
398
- AND r.type = ?
399
- ${COMPOSITE_ORDER}
394
+ SELECT *, composite_score FROM (
395
+ SELECT r.*,
396
+ bm25(resources_fts, 5.0, 3.0, 3.0, 2.0, 2.0, 1.0, 1.0, 1.0) AS relevance,
397
+ ${COMPOSITE_EXPR} AS composite_score
398
+ FROM resources_fts
399
+ JOIN resources r ON r.id = resources_fts.rowid
400
+ WHERE resources_fts MATCH ?
401
+ AND r.status = 'active'
402
+ AND r.type = ?
403
+ ) sub
404
+ ORDER BY composite_score ASC
400
405
  LIMIT ?
401
406
  `;
402
407
 
package/registry.mjs CHANGED
@@ -4,7 +4,7 @@
4
4
  import Database from 'better-sqlite3';
5
5
  import { existsSync, mkdirSync } from 'fs';
6
6
  import { dirname } from 'path';
7
- // debugLog, debugCatch available from utils.mjs if needed
7
+ import { debugCatch } from './utils.mjs';
8
8
 
9
9
  // ─── Schema ──────────────────────────────────────────────────────────────────
10
10
 
@@ -178,6 +178,9 @@ export function ensureRegistryDb(dbPath) {
178
178
  const schema = db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='invocations'`).get();
179
179
  if (schema?.sql && !schema.sql.includes('user_prompt')) {
180
180
  db.transaction(() => {
181
+ // Clean up leftover from previous failed migration attempt
182
+ const hasOld = db.prepare(`SELECT 1 FROM sqlite_master WHERE type='table' AND name='invocations_old'`).get();
183
+ if (hasOld) db.exec(`DROP TABLE invocations_old`);
181
184
  db.exec(`ALTER TABLE invocations RENAME TO invocations_old`);
182
185
  db.exec(INVOCATIONS_SCHEMA);
183
186
  db.exec(`INSERT INTO invocations
@@ -187,7 +190,7 @@ export function ensureRegistryDb(dbPath) {
187
190
  db.exec(`DROP TABLE invocations_old`);
188
191
  })();
189
192
  }
190
- } catch {}
193
+ } catch (e) { debugCatch(e, 'ensureRegistryDb-migration'); }
191
194
 
192
195
  db.exec(PREINSTALLED_SCHEMA);
193
196
 
@@ -223,7 +226,7 @@ const UPSERT_SQL = `
223
226
  */
224
227
  export function upsertResource(db, r) {
225
228
  return db.transaction(() => {
226
- const result = db.prepare(UPSERT_SQL).run(
229
+ db.prepare(UPSERT_SQL).run(
227
230
  r.name, r.type, r.status || 'active', r.source || 'preinstalled',
228
231
  r.repo_url || null, r.repo_stars || 0, r.local_path,
229
232
  r.file_hash || null, r.invocation_name || '',
@@ -233,7 +236,6 @@ export function upsertResource(db, r) {
233
236
  r.keywords || '', r.tech_stack || '', r.use_cases || '', r.complexity || 'intermediate',
234
237
  r.indexed_at || null
235
238
  );
236
- if (result.changes > 0 && result.lastInsertRowid) return Number(result.lastInsertRowid);
237
239
  const row = db.prepare('SELECT id FROM resources WHERE type = ? AND name = ?').get(r.type, r.name);
238
240
  return row?.id || 0;
239
241
  })();
package/schema.mjs CHANGED
@@ -97,6 +97,7 @@ const MIGRATIONS = [
97
97
  'ALTER TABLE observations ADD COLUMN minhash_sig TEXT',
98
98
  'ALTER TABLE observations ADD COLUMN access_count INTEGER DEFAULT 0',
99
99
  'ALTER TABLE observations ADD COLUMN compressed_into INTEGER DEFAULT NULL',
100
+ 'ALTER TABLE session_summaries ADD COLUMN remaining_items TEXT',
100
101
  ];
101
102
 
102
103
  /**
@@ -153,7 +154,7 @@ export function initSchema(db) {
153
154
 
154
155
  // FTS5 full-text search tables + triggers (idempotent)
155
156
  ensureFTS(db, 'observations_fts', 'observations', ['title', 'subtitle', 'narrative', 'text', 'facts', 'concepts']);
156
- ensureFTS(db, 'session_summaries_fts', 'session_summaries', ['request', 'investigated', 'learned', 'completed', 'next_steps', 'notes']);
157
+ ensureFTS(db, 'session_summaries_fts', 'session_summaries', ['request', 'investigated', 'learned', 'completed', 'next_steps', 'notes', 'remaining_items']);
157
158
  ensureFTS(db, 'user_prompts_fts', 'user_prompts', ['prompt_text']);
158
159
 
159
160
  return db;
@@ -196,7 +197,12 @@ export function ensureDb() {
196
197
  db.pragma('synchronous = NORMAL');
197
198
  db.pragma('foreign_keys = OFF'); // Enabled after dedup migration
198
199
 
199
- return initSchema(db);
200
+ try {
201
+ return initSchema(db);
202
+ } catch (e) {
203
+ try { db.close(); } catch {}
204
+ throw e;
205
+ }
200
206
  }
201
207
 
202
208
  /**
@@ -211,10 +217,12 @@ export function ensureDb() {
211
217
  */
212
218
  export function rebuildFTS(db) {
213
219
  const FTS_TABLES = ['observations_fts', 'session_summaries_fts', 'user_prompts_fts'];
220
+ const idRe = /^[a-z][a-z0-9_]*$/;
214
221
  const rebuilt = [];
215
222
  const errors = [];
216
223
  for (const fts of FTS_TABLES) {
217
224
  try {
225
+ if (!idRe.test(fts)) { errors.push(`${fts}: invalid identifier`); continue; }
218
226
  const exists = db.prepare(`SELECT 1 FROM sqlite_master WHERE type='table' AND name=?`).get(fts);
219
227
  if (!exists) { errors.push(`${fts}: not found`); continue; }
220
228
  db.exec(`INSERT INTO ${fts}(${fts}) VALUES('rebuild')`);
@@ -233,10 +241,12 @@ export function rebuildFTS(db) {
233
241
  */
234
242
  export function checkFTSIntegrity(db) {
235
243
  const FTS_TABLES = ['observations_fts', 'session_summaries_fts', 'user_prompts_fts'];
244
+ const idRe = /^[a-z][a-z0-9_]*$/;
236
245
  const details = [];
237
246
  let healthy = true;
238
247
  for (const fts of FTS_TABLES) {
239
248
  try {
249
+ if (!idRe.test(fts)) { details.push(`${fts}: invalid identifier`); healthy = false; continue; }
240
250
  const exists = db.prepare(`SELECT 1 FROM sqlite_master WHERE type='table' AND name=?`).get(fts);
241
251
  if (!exists) { details.push(`${fts}: missing`); healthy = false; continue; }
242
252
  db.exec(`INSERT INTO ${fts}(${fts}) VALUES('integrity-check')`);
@@ -254,7 +264,7 @@ export function ensureFTS(db, ftsName, tableName, columns) {
254
264
  if (exists) return;
255
265
 
256
266
  // Validate identifiers to prevent SQL injection
257
- const idRe = /^[a-z_]+$/;
267
+ const idRe = /^[a-z][a-z0-9_]*$/;
258
268
  if (!idRe.test(ftsName) || !idRe.test(tableName) || !columns.every(c => idRe.test(c))) {
259
269
  throw new Error(`Invalid identifier in ensureFTS: ${ftsName}, ${tableName}`);
260
270
  }
@@ -50,7 +50,7 @@ case "$tool" in
50
50
  exit 0
51
51
  ;;
52
52
  # Prefix filters
53
- mem_*|mcp__mem__*|mcp__sequential*|mcp__plugin_context7*)
53
+ mem_*|mcp__mem__*|mcp__plugin_claude-mem-lite*|mcp__sequential*|mcp__plugin_context7*)
54
54
  exit 0
55
55
  ;;
56
56
  esac
package/server.mjs CHANGED
@@ -61,7 +61,7 @@ const RECENCY_HALF_LIFE_MS = 1209600000; // 14 days in milliseconds
61
61
  // ─── MCP Server ─────────────────────────────────────────────────────────────
62
62
 
63
63
  const server = new McpServer(
64
- { name: 'claude-mem-lite', version: '2.0.0' },
64
+ { name: 'claude-mem-lite', version: '2.1.6' },
65
65
  {
66
66
  instructions: [
67
67
  'Proactively search memory to leverage past experience. This is your long-term memory across sessions.',
package/utils.mjs CHANGED
@@ -622,12 +622,11 @@ export function fmtTime(iso) {
622
622
  */
623
623
  export function isoWeekKey(epochMs) {
624
624
  const d = new Date(epochMs);
625
- const tmp = new Date(d.getTime());
626
- tmp.setHours(0, 0, 0, 0);
627
- tmp.setDate(tmp.getDate() + 4 - (tmp.getDay() || 7));
628
- const yearStart = new Date(tmp.getFullYear(), 0, 1);
625
+ const tmp = new Date(Date.UTC(d.getUTCFullYear(), d.getUTCMonth(), d.getUTCDate()));
626
+ tmp.setUTCDate(tmp.getUTCDate() + 4 - (tmp.getUTCDay() || 7));
627
+ const yearStart = new Date(Date.UTC(tmp.getUTCFullYear(), 0, 1));
629
628
  const weekNum = Math.ceil(((tmp - yearStart) / 86400000 + 1) / 7);
630
- const isoYear = tmp.getFullYear();
629
+ const isoYear = tmp.getUTCFullYear();
631
630
  return `${isoYear}-W${String(weekNum).padStart(2, '0')}`;
632
631
  }
633
632