claude-memory-layer 1.0.0 → 1.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/.claude/settings.local.json +15 -0
  2. package/.history/package_20260201114632.json +46 -0
  3. package/dist/cli/index.js +360 -154
  4. package/dist/cli/index.js.map +4 -4
  5. package/dist/core/index.js +337 -161
  6. package/dist/core/index.js.map +3 -3
  7. package/dist/hooks/session-end.js +320 -130
  8. package/dist/hooks/session-end.js.map +4 -4
  9. package/dist/hooks/session-start.js +331 -138
  10. package/dist/hooks/session-start.js.map +4 -4
  11. package/dist/hooks/stop.js +320 -130
  12. package/dist/hooks/stop.js.map +4 -4
  13. package/dist/hooks/user-prompt-submit.js +320 -130
  14. package/dist/hooks/user-prompt-submit.js.map +4 -4
  15. package/dist/services/memory-service.js +349 -128
  16. package/dist/services/memory-service.js.map +4 -4
  17. package/package.json +1 -1
  18. package/src/cli/index.ts +84 -23
  19. package/src/core/consolidated-store.ts +33 -18
  20. package/src/core/continuity-manager.ts +12 -7
  21. package/src/core/db-wrapper.ts +112 -0
  22. package/src/core/edge-repo.ts +22 -13
  23. package/src/core/entity-repo.ts +23 -14
  24. package/src/core/event-store.ts +98 -72
  25. package/src/core/task/blocker-resolver.ts +17 -9
  26. package/src/core/task/task-matcher.ts +8 -6
  27. package/src/core/task/task-projector.ts +29 -16
  28. package/src/core/task/task-resolver.ts +17 -9
  29. package/src/core/vector-outbox.ts +29 -16
  30. package/src/core/vector-store.ts +23 -12
  31. package/src/core/vector-worker.ts +7 -4
  32. package/src/core/working-set-store.ts +31 -18
  33. package/src/hooks/session-end.ts +3 -2
  34. package/src/hooks/session-start.ts +12 -8
  35. package/src/hooks/stop.ts +3 -2
  36. package/src/hooks/user-prompt-submit.ts +3 -2
  37. package/src/services/memory-service.ts +158 -6
@@ -12,7 +12,6 @@ import * as fs from "fs";
12
12
  import * as crypto2 from "crypto";
13
13
 
14
14
  // src/core/event-store.ts
15
- import { Database } from "duckdb";
16
15
  import { randomUUID } from "crypto";
17
16
 
18
17
  // src/core/canonical-key.ts
@@ -38,11 +37,92 @@ function makeDedupeKey(content, sessionId) {
38
37
  return `${sessionId}:${contentHash}`;
39
38
  }
40
39
 
40
+ // src/core/db-wrapper.ts
41
+ import duckdb from "duckdb";
42
+ function convertBigInts(obj) {
43
+ if (obj === null || obj === void 0)
44
+ return obj;
45
+ if (typeof obj === "bigint")
46
+ return Number(obj);
47
+ if (obj instanceof Date)
48
+ return obj;
49
+ if (Array.isArray(obj))
50
+ return obj.map(convertBigInts);
51
+ if (typeof obj === "object") {
52
+ const result = {};
53
+ for (const [key, value] of Object.entries(obj)) {
54
+ result[key] = convertBigInts(value);
55
+ }
56
+ return result;
57
+ }
58
+ return obj;
59
+ }
60
+ function toDate(value) {
61
+ if (value instanceof Date)
62
+ return value;
63
+ if (typeof value === "string")
64
+ return new Date(value);
65
+ if (typeof value === "number")
66
+ return new Date(value);
67
+ return new Date(String(value));
68
+ }
69
+ function createDatabase(path2) {
70
+ return new duckdb.Database(path2);
71
+ }
72
+ function dbRun(db, sql, params = []) {
73
+ return new Promise((resolve2, reject) => {
74
+ if (params.length === 0) {
75
+ db.run(sql, (err) => {
76
+ if (err)
77
+ reject(err);
78
+ else
79
+ resolve2();
80
+ });
81
+ } else {
82
+ db.run(sql, ...params, (err) => {
83
+ if (err)
84
+ reject(err);
85
+ else
86
+ resolve2();
87
+ });
88
+ }
89
+ });
90
+ }
91
+ function dbAll(db, sql, params = []) {
92
+ return new Promise((resolve2, reject) => {
93
+ if (params.length === 0) {
94
+ db.all(sql, (err, rows) => {
95
+ if (err)
96
+ reject(err);
97
+ else
98
+ resolve2(convertBigInts(rows || []));
99
+ });
100
+ } else {
101
+ db.all(sql, ...params, (err, rows) => {
102
+ if (err)
103
+ reject(err);
104
+ else
105
+ resolve2(convertBigInts(rows || []));
106
+ });
107
+ }
108
+ });
109
+ }
110
+ function dbClose(db) {
111
+ return new Promise((resolve2, reject) => {
112
+ db.close((err) => {
113
+ if (err)
114
+ reject(err);
115
+ else
116
+ resolve2();
117
+ });
118
+ });
119
+ }
120
+
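The db-wrapper module added above promisifies DuckDB's callback API and, via convertBigInts, turns BigInt results (e.g. COUNT(*)) into plain JS numbers before they reach callers. A minimal usage sketch, assuming src/core/db-wrapper.ts exports these helpers under the names seen in the bundle (the table and queries here are illustrative only):

    import { createDatabase, dbRun, dbAll, dbClose } from "./db-wrapper";

    async function demo(): Promise<void> {
      const db = createDatabase(":memory:");
      await dbRun(db, "CREATE TABLE t (id INTEGER, name VARCHAR)");
      await dbRun(db, "INSERT INTO t VALUES (?, ?)", [1, "alpha"]);
      // DuckDB's Node bindings hand COUNT(*) back as a BigInt; convertBigInts
      // inside dbAll converts it to a number before the rows are returned.
      const rows = await dbAll(db, "SELECT COUNT(*) AS count FROM t");
      console.log(rows[0].count); // 1 (a number, not 1n)
      await dbClose(db);
    }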
41
121
  // src/core/event-store.ts
42
122
  var EventStore = class {
43
123
  constructor(dbPath) {
44
124
  this.dbPath = dbPath;
45
- this.db = new Database(dbPath);
125
+ this.db = createDatabase(dbPath);
46
126
  }
47
127
  db;
48
128
  initialized = false;
@@ -52,7 +132,7 @@ var EventStore = class {
52
132
  async initialize() {
53
133
  if (this.initialized)
54
134
  return;
55
- await this.db.run(`
135
+ await dbRun(this.db, `
56
136
  CREATE TABLE IF NOT EXISTS events (
57
137
  id VARCHAR PRIMARY KEY,
58
138
  event_type VARCHAR NOT NULL,
@@ -64,14 +144,14 @@ var EventStore = class {
64
144
  metadata JSON
65
145
  )
66
146
  `);
67
- await this.db.run(`
147
+ await dbRun(this.db, `
68
148
  CREATE TABLE IF NOT EXISTS event_dedup (
69
149
  dedupe_key VARCHAR PRIMARY KEY,
70
150
  event_id VARCHAR NOT NULL,
71
151
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
72
152
  )
73
153
  `);
74
- await this.db.run(`
154
+ await dbRun(this.db, `
75
155
  CREATE TABLE IF NOT EXISTS sessions (
76
156
  id VARCHAR PRIMARY KEY,
77
157
  started_at TIMESTAMP NOT NULL,
@@ -81,7 +161,7 @@ var EventStore = class {
81
161
  tags JSON
82
162
  )
83
163
  `);
84
- await this.db.run(`
164
+ await dbRun(this.db, `
85
165
  CREATE TABLE IF NOT EXISTS insights (
86
166
  id VARCHAR PRIMARY KEY,
87
167
  insight_type VARCHAR NOT NULL,
@@ -93,7 +173,7 @@ var EventStore = class {
93
173
  last_updated TIMESTAMP
94
174
  )
95
175
  `);
96
- await this.db.run(`
176
+ await dbRun(this.db, `
97
177
  CREATE TABLE IF NOT EXISTS embedding_outbox (
98
178
  id VARCHAR PRIMARY KEY,
99
179
  event_id VARCHAR NOT NULL,
@@ -105,7 +185,7 @@ var EventStore = class {
105
185
  error_message TEXT
106
186
  )
107
187
  `);
108
- await this.db.run(`
188
+ await dbRun(this.db, `
109
189
  CREATE TABLE IF NOT EXISTS projection_offsets (
110
190
  projection_name VARCHAR PRIMARY KEY,
111
191
  last_event_id VARCHAR,
@@ -113,14 +193,14 @@ var EventStore = class {
113
193
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
114
194
  )
115
195
  `);
116
- await this.db.run(`
196
+ await dbRun(this.db, `
117
197
  CREATE TABLE IF NOT EXISTS memory_levels (
118
198
  event_id VARCHAR PRIMARY KEY,
119
199
  level VARCHAR NOT NULL DEFAULT 'L0',
120
200
  promoted_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
121
201
  )
122
202
  `);
123
- await this.db.run(`
203
+ await dbRun(this.db, `
124
204
  CREATE TABLE IF NOT EXISTS entries (
125
205
  entry_id VARCHAR PRIMARY KEY,
126
206
  created_ts TIMESTAMP NOT NULL,
@@ -136,7 +216,7 @@ var EventStore = class {
136
216
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
137
217
  )
138
218
  `);
139
- await this.db.run(`
219
+ await dbRun(this.db, `
140
220
  CREATE TABLE IF NOT EXISTS entities (
141
221
  entity_id VARCHAR PRIMARY KEY,
142
222
  entity_type VARCHAR NOT NULL,
@@ -151,7 +231,7 @@ var EventStore = class {
151
231
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
152
232
  )
153
233
  `);
154
- await this.db.run(`
234
+ await dbRun(this.db, `
155
235
  CREATE TABLE IF NOT EXISTS entity_aliases (
156
236
  entity_type VARCHAR NOT NULL,
157
237
  canonical_key VARCHAR NOT NULL,
@@ -161,7 +241,7 @@ var EventStore = class {
161
241
  PRIMARY KEY(entity_type, canonical_key)
162
242
  )
163
243
  `);
164
- await this.db.run(`
244
+ await dbRun(this.db, `
165
245
  CREATE TABLE IF NOT EXISTS edges (
166
246
  edge_id VARCHAR PRIMARY KEY,
167
247
  src_type VARCHAR NOT NULL,
@@ -173,7 +253,7 @@ var EventStore = class {
173
253
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
174
254
  )
175
255
  `);
176
- await this.db.run(`
256
+ await dbRun(this.db, `
177
257
  CREATE TABLE IF NOT EXISTS vector_outbox (
178
258
  job_id VARCHAR PRIMARY KEY,
179
259
  item_kind VARCHAR NOT NULL,
@@ -187,7 +267,7 @@ var EventStore = class {
187
267
  UNIQUE(item_kind, item_id, embedding_version)
188
268
  )
189
269
  `);
190
- await this.db.run(`
270
+ await dbRun(this.db, `
191
271
  CREATE TABLE IF NOT EXISTS build_runs (
192
272
  build_id VARCHAR PRIMARY KEY,
193
273
  started_at TIMESTAMP NOT NULL,
@@ -202,7 +282,7 @@ var EventStore = class {
202
282
  error VARCHAR
203
283
  )
204
284
  `);
205
- await this.db.run(`
285
+ await dbRun(this.db, `
206
286
  CREATE TABLE IF NOT EXISTS pipeline_metrics (
207
287
  id VARCHAR PRIMARY KEY,
208
288
  ts TIMESTAMP NOT NULL,
@@ -213,7 +293,7 @@ var EventStore = class {
213
293
  session_id VARCHAR
214
294
  )
215
295
  `);
216
- await this.db.run(`
296
+ await dbRun(this.db, `
217
297
  CREATE TABLE IF NOT EXISTS working_set (
218
298
  id VARCHAR PRIMARY KEY,
219
299
  event_id VARCHAR NOT NULL,
@@ -223,7 +303,7 @@ var EventStore = class {
223
303
  expires_at TIMESTAMP
224
304
  )
225
305
  `);
226
- await this.db.run(`
306
+ await dbRun(this.db, `
227
307
  CREATE TABLE IF NOT EXISTS consolidated_memories (
228
308
  memory_id VARCHAR PRIMARY KEY,
229
309
  summary TEXT NOT NULL,
@@ -235,7 +315,7 @@ var EventStore = class {
235
315
  access_count INTEGER DEFAULT 0
236
316
  )
237
317
  `);
238
- await this.db.run(`
318
+ await dbRun(this.db, `
239
319
  CREATE TABLE IF NOT EXISTS continuity_log (
240
320
  log_id VARCHAR PRIMARY KEY,
241
321
  from_context_id VARCHAR,
@@ -245,26 +325,26 @@ var EventStore = class {
245
325
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
246
326
  )
247
327
  `);
248
- await this.db.run(`
328
+ await dbRun(this.db, `
249
329
  CREATE TABLE IF NOT EXISTS endless_config (
250
330
  key VARCHAR PRIMARY KEY,
251
331
  value JSON,
252
332
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
253
333
  )
254
334
  `);
255
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_entries_type ON entries(entry_type)`);
256
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_entries_stage ON entries(stage)`);
257
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_entries_canonical ON entries(canonical_key)`);
258
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_entities_type_key ON entities(entity_type, canonical_key)`);
259
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_entities_status ON entities(status)`);
260
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_src ON edges(src_id, rel_type)`);
261
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_dst ON edges(dst_id, rel_type)`);
262
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_edges_rel ON edges(rel_type)`);
263
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_outbox_status ON vector_outbox(status)`);
264
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_working_set_expires ON working_set(expires_at)`);
265
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_working_set_relevance ON working_set(relevance_score DESC)`);
266
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_consolidated_confidence ON consolidated_memories(confidence DESC)`);
267
- await this.db.run(`CREATE INDEX IF NOT EXISTS idx_continuity_created ON continuity_log(created_at)`);
335
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_entries_type ON entries(entry_type)`);
336
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_entries_stage ON entries(stage)`);
337
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_entries_canonical ON entries(canonical_key)`);
338
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_entities_type_key ON entities(entity_type, canonical_key)`);
339
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_entities_status ON entities(status)`);
340
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_edges_src ON edges(src_id, rel_type)`);
341
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_edges_dst ON edges(dst_id, rel_type)`);
342
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_edges_rel ON edges(rel_type)`);
343
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_outbox_status ON vector_outbox(status)`);
344
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_working_set_expires ON working_set(expires_at)`);
345
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_working_set_relevance ON working_set(relevance_score DESC)`);
346
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_consolidated_confidence ON consolidated_memories(confidence DESC)`);
347
+ await dbRun(this.db, `CREATE INDEX IF NOT EXISTS idx_continuity_created ON continuity_log(created_at)`);
268
348
  this.initialized = true;
269
349
  }
270
350
  /**
@@ -275,7 +355,8 @@ var EventStore = class {
275
355
  await this.initialize();
276
356
  const canonicalKey = makeCanonicalKey(input.content);
277
357
  const dedupeKey = makeDedupeKey(input.content, input.sessionId);
278
- const existing = await this.db.all(
358
+ const existing = await dbAll(
359
+ this.db,
279
360
  `SELECT event_id FROM event_dedup WHERE dedupe_key = ?`,
280
361
  [dedupeKey]
281
362
  );
@@ -289,7 +370,8 @@ var EventStore = class {
289
370
  const id = randomUUID();
290
371
  const timestamp = input.timestamp.toISOString();
291
372
  try {
292
- await this.db.run(
373
+ await dbRun(
374
+ this.db,
293
375
  `INSERT INTO events (id, event_type, session_id, timestamp, content, canonical_key, dedupe_key, metadata)
294
376
  VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
295
377
  [
@@ -303,11 +385,13 @@ var EventStore = class {
303
385
  JSON.stringify(input.metadata || {})
304
386
  ]
305
387
  );
306
- await this.db.run(
388
+ await dbRun(
389
+ this.db,
307
390
  `INSERT INTO event_dedup (dedupe_key, event_id) VALUES (?, ?)`,
308
391
  [dedupeKey, id]
309
392
  );
310
- await this.db.run(
393
+ await dbRun(
394
+ this.db,
311
395
  `INSERT INTO memory_levels (event_id, level) VALUES (?, 'L0')`,
312
396
  [id]
313
397
  );
@@ -324,7 +408,8 @@ var EventStore = class {
324
408
  */
325
409
  async getSessionEvents(sessionId) {
326
410
  await this.initialize();
327
- const rows = await this.db.all(
411
+ const rows = await dbAll(
412
+ this.db,
328
413
  `SELECT * FROM events WHERE session_id = ? ORDER BY timestamp ASC`,
329
414
  [sessionId]
330
415
  );
@@ -335,7 +420,8 @@ var EventStore = class {
335
420
  */
336
421
  async getRecentEvents(limit = 100) {
337
422
  await this.initialize();
338
- const rows = await this.db.all(
423
+ const rows = await dbAll(
424
+ this.db,
339
425
  `SELECT * FROM events ORDER BY timestamp DESC LIMIT ?`,
340
426
  [limit]
341
427
  );
@@ -346,7 +432,8 @@ var EventStore = class {
346
432
  */
347
433
  async getEvent(id) {
348
434
  await this.initialize();
349
- const rows = await this.db.all(
435
+ const rows = await dbAll(
436
+ this.db,
350
437
  `SELECT * FROM events WHERE id = ?`,
351
438
  [id]
352
439
  );
@@ -359,12 +446,14 @@ var EventStore = class {
359
446
  */
360
447
  async upsertSession(session) {
361
448
  await this.initialize();
362
- const existing = await this.db.all(
449
+ const existing = await dbAll(
450
+ this.db,
363
451
  `SELECT id FROM sessions WHERE id = ?`,
364
452
  [session.id]
365
453
  );
366
454
  if (existing.length === 0) {
367
- await this.db.run(
455
+ await dbRun(
456
+ this.db,
368
457
  `INSERT INTO sessions (id, started_at, project_path, tags)
369
458
  VALUES (?, ?, ?, ?)`,
370
459
  [
@@ -391,7 +480,8 @@ var EventStore = class {
391
480
  }
392
481
  if (updates.length > 0) {
393
482
  values.push(session.id);
394
- await this.db.run(
483
+ await dbRun(
484
+ this.db,
395
485
  `UPDATE sessions SET ${updates.join(", ")} WHERE id = ?`,
396
486
  values
397
487
  );
@@ -403,7 +493,8 @@ var EventStore = class {
403
493
  */
404
494
  async getSession(id) {
405
495
  await this.initialize();
406
- const rows = await this.db.all(
496
+ const rows = await dbAll(
497
+ this.db,
407
498
  `SELECT * FROM sessions WHERE id = ?`,
408
499
  [id]
409
500
  );
@@ -412,8 +503,8 @@ var EventStore = class {
412
503
  const row = rows[0];
413
504
  return {
414
505
  id: row.id,
415
- startedAt: new Date(row.started_at),
416
- endedAt: row.ended_at ? new Date(row.ended_at) : void 0,
506
+ startedAt: toDate(row.started_at),
507
+ endedAt: row.ended_at ? toDate(row.ended_at) : void 0,
417
508
  projectPath: row.project_path,
418
509
  summary: row.summary,
419
510
  tags: row.tags ? JSON.parse(row.tags) : void 0
@@ -425,7 +516,8 @@ var EventStore = class {
425
516
  async enqueueForEmbedding(eventId, content) {
426
517
  await this.initialize();
427
518
  const id = randomUUID();
428
- await this.db.run(
519
+ await dbRun(
520
+ this.db,
429
521
  `INSERT INTO embedding_outbox (id, event_id, content, status, retry_count)
430
522
  VALUES (?, ?, ?, 'pending', 0)`,
431
523
  [id, eventId, content]
@@ -437,25 +529,30 @@ var EventStore = class {
437
529
  */
438
530
  async getPendingOutboxItems(limit = 32) {
439
531
  await this.initialize();
440
- const rows = await this.db.all(
441
- `UPDATE embedding_outbox
442
- SET status = 'processing'
443
- WHERE id IN (
444
- SELECT id FROM embedding_outbox
445
- WHERE status = 'pending'
446
- ORDER BY created_at
447
- LIMIT ?
448
- )
449
- RETURNING *`,
532
+ const pending = await dbAll(
533
+ this.db,
534
+ `SELECT * FROM embedding_outbox
535
+ WHERE status = 'pending'
536
+ ORDER BY created_at
537
+ LIMIT ?`,
450
538
  [limit]
451
539
  );
452
- return rows.map((row) => ({
540
+ if (pending.length === 0)
541
+ return [];
542
+ const ids = pending.map((r) => r.id);
543
+ const placeholders = ids.map(() => "?").join(",");
544
+ await dbRun(
545
+ this.db,
546
+ `UPDATE embedding_outbox SET status = 'processing' WHERE id IN (${placeholders})`,
547
+ ids
548
+ );
549
+ return pending.map((row) => ({
453
550
  id: row.id,
454
551
  eventId: row.event_id,
455
552
  content: row.content,
456
- status: row.status,
553
+ status: "processing",
457
554
  retryCount: row.retry_count,
458
- createdAt: new Date(row.created_at),
555
+ createdAt: toDate(row.created_at),
459
556
  errorMessage: row.error_message
460
557
  }));
461
558
  }
@@ -466,7 +563,8 @@ var EventStore = class {
466
563
  if (ids.length === 0)
467
564
  return;
468
565
  const placeholders = ids.map(() => "?").join(",");
469
- await this.db.run(
566
+ await dbRun(
567
+ this.db,
470
568
  `DELETE FROM embedding_outbox WHERE id IN (${placeholders})`,
471
569
  ids
472
570
  );
@@ -478,7 +576,8 @@ var EventStore = class {
478
576
  if (ids.length === 0)
479
577
  return;
480
578
  const placeholders = ids.map(() => "?").join(",");
481
- await this.db.run(
579
+ await dbRun(
580
+ this.db,
482
581
  `UPDATE embedding_outbox
483
582
  SET status = CASE WHEN retry_count >= 3 THEN 'failed' ELSE 'pending' END,
484
583
  retry_count = retry_count + 1,
@@ -492,7 +591,8 @@ var EventStore = class {
492
591
  */
493
592
  async updateMemoryLevel(eventId, level) {
494
593
  await this.initialize();
495
- await this.db.run(
594
+ await dbRun(
595
+ this.db,
496
596
  `UPDATE memory_levels SET level = ?, promoted_at = CURRENT_TIMESTAMP WHERE event_id = ?`,
497
597
  [level, eventId]
498
598
  );
@@ -502,7 +602,8 @@ var EventStore = class {
502
602
  */
503
603
  async getLevelStats() {
504
604
  await this.initialize();
505
- const rows = await this.db.all(
605
+ const rows = await dbAll(
606
+ this.db,
506
607
  `SELECT level, COUNT(*) as count FROM memory_levels GROUP BY level`
507
608
  );
508
609
  return rows;
@@ -521,7 +622,8 @@ var EventStore = class {
521
622
  */
522
623
  async getEndlessConfig(key) {
523
624
  await this.initialize();
524
- const rows = await this.db.all(
625
+ const rows = await dbAll(
626
+ this.db,
525
627
  `SELECT value FROM endless_config WHERE key = ?`,
526
628
  [key]
527
629
  );
@@ -534,7 +636,8 @@ var EventStore = class {
534
636
  */
535
637
  async setEndlessConfig(key, value) {
536
638
  await this.initialize();
537
- await this.db.run(
639
+ await dbRun(
640
+ this.db,
538
641
  `INSERT OR REPLACE INTO endless_config (key, value, updated_at)
539
642
  VALUES (?, ?, CURRENT_TIMESTAMP)`,
540
643
  [key, JSON.stringify(value)]
@@ -545,13 +648,14 @@ var EventStore = class {
545
648
  */
546
649
  async getAllSessions() {
547
650
  await this.initialize();
548
- const rows = await this.db.all(
651
+ const rows = await dbAll(
652
+ this.db,
549
653
  `SELECT * FROM sessions ORDER BY started_at DESC`
550
654
  );
551
655
  return rows.map((row) => ({
552
656
  id: row.id,
553
- startedAt: new Date(row.started_at),
554
- endedAt: row.ended_at ? new Date(row.ended_at) : void 0,
657
+ startedAt: toDate(row.started_at),
658
+ endedAt: row.ended_at ? toDate(row.ended_at) : void 0,
555
659
  projectPath: row.project_path,
556
660
  summary: row.summary,
557
661
  tags: row.tags ? JSON.parse(row.tags) : void 0
@@ -561,7 +665,7 @@ var EventStore = class {
561
665
  * Close database connection
562
666
  */
563
667
  async close() {
564
- await this.db.close();
668
+ await dbClose(this.db);
565
669
  }
566
670
  /**
567
671
  * Convert database row to MemoryEvent
@@ -571,7 +675,7 @@ var EventStore = class {
571
675
  id: row.id,
572
676
  eventType: row.event_type,
573
677
  sessionId: row.session_id,
574
- timestamp: new Date(row.timestamp),
678
+ timestamp: toDate(row.timestamp),
575
679
  content: row.content,
576
680
  canonicalKey: row.canonical_key,
577
681
  dedupeKey: row.dedupe_key,
@@ -664,23 +768,28 @@ var VectorStore = class {
664
768
  return [];
665
769
  }
666
770
  const { limit = 5, minScore = 0.7, sessionId } = options;
667
- let query = this.table.search(queryVector).limit(limit * 2);
771
+ let query = this.table.search(queryVector).distanceType("cosine").limit(limit * 2);
668
772
  if (sessionId) {
669
773
  query = query.where(`sessionId = '${sessionId}'`);
670
774
  }
671
775
  const results = await query.toArray();
672
776
  return results.filter((r) => {
673
- const score = 1 - (r._distance || 0);
777
+ const distance = r._distance || 0;
778
+ const score = 1 - distance / 2;
674
779
  return score >= minScore;
675
- }).slice(0, limit).map((r) => ({
676
- id: r.id,
677
- eventId: r.eventId,
678
- content: r.content,
679
- score: 1 - (r._distance || 0),
680
- sessionId: r.sessionId,
681
- eventType: r.eventType,
682
- timestamp: r.timestamp
683
- }));
780
+ }).slice(0, limit).map((r) => {
781
+ const distance = r._distance || 0;
782
+ const score = 1 - distance / 2;
783
+ return {
784
+ id: r.id,
785
+ eventId: r.eventId,
786
+ content: r.content,
787
+ score,
788
+ sessionId: r.sessionId,
789
+ eventType: r.eventType,
790
+ timestamp: r.timestamp
791
+ };
792
+ });
684
793
  }
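The search above now requests cosine distance from LanceDB explicitly and rescales it before filtering: cosine distance is 1 - cosine similarity, so it falls in [0, 2], and score = 1 - distance / 2 maps it onto [0, 1] before the minScore comparison. The mapping as a standalone sketch (the helper name is illustrative, not from the package):

    // Map a LanceDB cosine distance d in [0, 2] to a similarity score in [0, 1].
    // d = 0 -> 1.0 (same direction), d = 1 -> 0.5 (orthogonal), d = 2 -> 0.0 (opposite).
    function cosineDistanceToScore(distance: number): number {
      return 1 - distance / 2;
    }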
685
794
  /**
686
795
  * Delete vector by event ID
@@ -1506,7 +1615,8 @@ var WorkingSetStore = class {
1506
1615
  const expiresAt = new Date(
1507
1616
  Date.now() + this.config.workingSet.timeWindowHours * 60 * 60 * 1e3
1508
1617
  );
1509
- await this.db.run(
1618
+ await dbRun(
1619
+ this.db,
1510
1620
  `INSERT OR REPLACE INTO working_set (id, event_id, added_at, relevance_score, topics, expires_at)
1511
1621
  VALUES (?, ?, CURRENT_TIMESTAMP, ?, ?, ?)`,
1512
1622
  [
@@ -1524,7 +1634,8 @@ var WorkingSetStore = class {
1524
1634
  */
1525
1635
  async get() {
1526
1636
  await this.cleanup();
1527
- const rows = await this.db.all(
1637
+ const rows = await dbAll(
1638
+ this.db,
1528
1639
  `SELECT ws.*, e.*
1529
1640
  FROM working_set ws
1530
1641
  JOIN events e ON ws.event_id = e.id
@@ -1536,7 +1647,7 @@ var WorkingSetStore = class {
1536
1647
  id: row.id,
1537
1648
  eventType: row.event_type,
1538
1649
  sessionId: row.session_id,
1539
- timestamp: new Date(row.timestamp),
1650
+ timestamp: toDate(row.timestamp),
1540
1651
  content: row.content,
1541
1652
  canonicalKey: row.canonical_key,
1542
1653
  dedupeKey: row.dedupe_key,
@@ -1552,23 +1663,25 @@ var WorkingSetStore = class {
1552
1663
  * Get working set items (metadata only)
1553
1664
  */
1554
1665
  async getItems() {
1555
- const rows = await this.db.all(
1666
+ const rows = await dbAll(
1667
+ this.db,
1556
1668
  `SELECT * FROM working_set ORDER BY relevance_score DESC, added_at DESC`
1557
1669
  );
1558
1670
  return rows.map((row) => ({
1559
1671
  id: row.id,
1560
1672
  eventId: row.event_id,
1561
- addedAt: new Date(row.added_at),
1673
+ addedAt: toDate(row.added_at),
1562
1674
  relevanceScore: row.relevance_score,
1563
1675
  topics: row.topics ? JSON.parse(row.topics) : void 0,
1564
- expiresAt: new Date(row.expires_at)
1676
+ expiresAt: toDate(row.expires_at)
1565
1677
  }));
1566
1678
  }
1567
1679
  /**
1568
1680
  * Update relevance score for an event
1569
1681
  */
1570
1682
  async updateRelevance(eventId, score) {
1571
- await this.db.run(
1683
+ await dbRun(
1684
+ this.db,
1572
1685
  `UPDATE working_set SET relevance_score = ? WHERE event_id = ?`,
1573
1686
  [score, eventId]
1574
1687
  );
@@ -1580,7 +1693,8 @@ var WorkingSetStore = class {
1580
1693
  if (eventIds.length === 0)
1581
1694
  return;
1582
1695
  const placeholders = eventIds.map(() => "?").join(",");
1583
- await this.db.run(
1696
+ await dbRun(
1697
+ this.db,
1584
1698
  `DELETE FROM working_set WHERE event_id IN (${placeholders})`,
1585
1699
  eventIds
1586
1700
  );
@@ -1589,7 +1703,8 @@ var WorkingSetStore = class {
1589
1703
  * Get the count of items in working set
1590
1704
  */
1591
1705
  async count() {
1592
- const result = await this.db.all(
1706
+ const result = await dbAll(
1707
+ this.db,
1593
1708
  `SELECT COUNT(*) as count FROM working_set`
1594
1709
  );
1595
1710
  return result[0]?.count || 0;
@@ -1598,13 +1713,14 @@ var WorkingSetStore = class {
1598
1713
  * Clear the entire working set
1599
1714
  */
1600
1715
  async clear() {
1601
- await this.db.run(`DELETE FROM working_set`);
1716
+ await dbRun(this.db, `DELETE FROM working_set`);
1602
1717
  }
1603
1718
  /**
1604
1719
  * Check if an event is in the working set
1605
1720
  */
1606
1721
  async contains(eventId) {
1607
- const result = await this.db.all(
1722
+ const result = await dbAll(
1723
+ this.db,
1608
1724
  `SELECT COUNT(*) as count FROM working_set WHERE event_id = ?`,
1609
1725
  [eventId]
1610
1726
  );
@@ -1617,7 +1733,8 @@ var WorkingSetStore = class {
1617
1733
  const newExpiresAt = new Date(
1618
1734
  Date.now() + this.config.workingSet.timeWindowHours * 60 * 60 * 1e3
1619
1735
  );
1620
- await this.db.run(
1736
+ await dbRun(
1737
+ this.db,
1621
1738
  `UPDATE working_set SET expires_at = ? WHERE event_id = ?`,
1622
1739
  [newExpiresAt.toISOString(), eventId]
1623
1740
  );
@@ -1626,7 +1743,8 @@ var WorkingSetStore = class {
1626
1743
  * Clean up expired items
1627
1744
  */
1628
1745
  async cleanup() {
1629
- await this.db.run(
1746
+ await dbRun(
1747
+ this.db,
1630
1748
  `DELETE FROM working_set WHERE expires_at < datetime('now')`
1631
1749
  );
1632
1750
  }
@@ -1636,7 +1754,8 @@ var WorkingSetStore = class {
1636
1754
  */
1637
1755
  async enforceLimit() {
1638
1756
  const maxEvents = this.config.workingSet.maxEvents;
1639
- const keepIds = await this.db.all(
1757
+ const keepIds = await dbAll(
1758
+ this.db,
1640
1759
  `SELECT id FROM working_set
1641
1760
  ORDER BY relevance_score DESC, added_at DESC
1642
1761
  LIMIT ?`,
@@ -1646,7 +1765,8 @@ var WorkingSetStore = class {
1646
1765
  return;
1647
1766
  const keepIdList = keepIds.map((r) => r.id);
1648
1767
  const placeholders = keepIdList.map(() => "?").join(",");
1649
- await this.db.run(
1768
+ await dbRun(
1769
+ this.db,
1650
1770
  `DELETE FROM working_set WHERE id NOT IN (${placeholders})`,
1651
1771
  keepIdList
1652
1772
  );
@@ -1655,7 +1775,8 @@ var WorkingSetStore = class {
1655
1775
  * Calculate continuity score based on recent context transitions
1656
1776
  */
1657
1777
  async calculateContinuityScore() {
1658
- const result = await this.db.all(
1778
+ const result = await dbAll(
1779
+ this.db,
1659
1780
  `SELECT AVG(continuity_score) as avg_score
1660
1781
  FROM continuity_log
1661
1782
  WHERE created_at > datetime('now', '-1 hour')`
@@ -1666,7 +1787,8 @@ var WorkingSetStore = class {
1666
1787
  * Get topics from current working set for context matching
1667
1788
  */
1668
1789
  async getActiveTopics() {
1669
- const rows = await this.db.all(
1790
+ const rows = await dbAll(
1791
+ this.db,
1670
1792
  `SELECT topics FROM working_set WHERE topics IS NOT NULL`
1671
1793
  );
1672
1794
  const allTopics = /* @__PURE__ */ new Set();
@@ -1695,7 +1817,8 @@ var ConsolidatedStore = class {
1695
1817
  */
1696
1818
  async create(input) {
1697
1819
  const memoryId = randomUUID3();
1698
- await this.db.run(
1820
+ await dbRun(
1821
+ this.db,
1699
1822
  `INSERT INTO consolidated_memories
1700
1823
  (memory_id, summary, topics, source_events, confidence, created_at)
1701
1824
  VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)`,
@@ -1713,7 +1836,8 @@ var ConsolidatedStore = class {
1713
1836
  * Get a consolidated memory by ID
1714
1837
  */
1715
1838
  async get(memoryId) {
1716
- const rows = await this.db.all(
1839
+ const rows = await dbAll(
1840
+ this.db,
1717
1841
  `SELECT * FROM consolidated_memories WHERE memory_id = ?`,
1718
1842
  [memoryId]
1719
1843
  );
@@ -1726,7 +1850,8 @@ var ConsolidatedStore = class {
1726
1850
  */
1727
1851
  async search(query, options) {
1728
1852
  const topK = options?.topK || 5;
1729
- const rows = await this.db.all(
1853
+ const rows = await dbAll(
1854
+ this.db,
1730
1855
  `SELECT * FROM consolidated_memories
1731
1856
  WHERE summary LIKE ?
1732
1857
  ORDER BY confidence DESC
@@ -1742,7 +1867,8 @@ var ConsolidatedStore = class {
1742
1867
  const topK = options?.topK || 5;
1743
1868
  const topicConditions = topics.map(() => `topics LIKE ?`).join(" OR ");
1744
1869
  const topicParams = topics.map((t) => `%"${t}"%`);
1745
- const rows = await this.db.all(
1870
+ const rows = await dbAll(
1871
+ this.db,
1746
1872
  `SELECT * FROM consolidated_memories
1747
1873
  WHERE ${topicConditions}
1748
1874
  ORDER BY confidence DESC
@@ -1756,7 +1882,8 @@ var ConsolidatedStore = class {
1756
1882
  */
1757
1883
  async getAll(options) {
1758
1884
  const limit = options?.limit || 100;
1759
- const rows = await this.db.all(
1885
+ const rows = await dbAll(
1886
+ this.db,
1760
1887
  `SELECT * FROM consolidated_memories
1761
1888
  ORDER BY confidence DESC, created_at DESC
1762
1889
  LIMIT ?`,
@@ -1770,7 +1897,8 @@ var ConsolidatedStore = class {
1770
1897
  async getRecent(options) {
1771
1898
  const limit = options?.limit || 10;
1772
1899
  const hours = options?.hours || 24;
1773
- const rows = await this.db.all(
1900
+ const rows = await dbAll(
1901
+ this.db,
1774
1902
  `SELECT * FROM consolidated_memories
1775
1903
  WHERE created_at > datetime('now', '-${hours} hours')
1776
1904
  ORDER BY created_at DESC
@@ -1783,7 +1911,8 @@ var ConsolidatedStore = class {
1783
1911
  * Mark a memory as accessed (tracks usage for importance scoring)
1784
1912
  */
1785
1913
  async markAccessed(memoryId) {
1786
- await this.db.run(
1914
+ await dbRun(
1915
+ this.db,
1787
1916
  `UPDATE consolidated_memories
1788
1917
  SET accessed_at = CURRENT_TIMESTAMP,
1789
1918
  access_count = access_count + 1
@@ -1795,7 +1924,8 @@ var ConsolidatedStore = class {
1795
1924
  * Update confidence score for a memory
1796
1925
  */
1797
1926
  async updateConfidence(memoryId, confidence) {
1798
- await this.db.run(
1927
+ await dbRun(
1928
+ this.db,
1799
1929
  `UPDATE consolidated_memories
1800
1930
  SET confidence = ?
1801
1931
  WHERE memory_id = ?`,
@@ -1806,7 +1936,8 @@ var ConsolidatedStore = class {
1806
1936
  * Delete a consolidated memory
1807
1937
  */
1808
1938
  async delete(memoryId) {
1809
- await this.db.run(
1939
+ await dbRun(
1940
+ this.db,
1810
1941
  `DELETE FROM consolidated_memories WHERE memory_id = ?`,
1811
1942
  [memoryId]
1812
1943
  );
@@ -1815,7 +1946,8 @@ var ConsolidatedStore = class {
1815
1946
  * Get count of consolidated memories
1816
1947
  */
1817
1948
  async count() {
1818
- const result = await this.db.all(
1949
+ const result = await dbAll(
1950
+ this.db,
1819
1951
  `SELECT COUNT(*) as count FROM consolidated_memories`
1820
1952
  );
1821
1953
  return result[0]?.count || 0;
@@ -1824,7 +1956,8 @@ var ConsolidatedStore = class {
1824
1956
  * Get most accessed memories (for importance scoring)
1825
1957
  */
1826
1958
  async getMostAccessed(limit = 10) {
1827
- const rows = await this.db.all(
1959
+ const rows = await dbAll(
1960
+ this.db,
1828
1961
  `SELECT * FROM consolidated_memories
1829
1962
  WHERE access_count > 0
1830
1963
  ORDER BY access_count DESC
@@ -1838,11 +1971,13 @@ var ConsolidatedStore = class {
1838
1971
  */
1839
1972
  async getStats() {
1840
1973
  const total = await this.count();
1841
- const avgResult = await this.db.all(
1974
+ const avgResult = await dbAll(
1975
+ this.db,
1842
1976
  `SELECT AVG(confidence) as avg FROM consolidated_memories`
1843
1977
  );
1844
1978
  const averageConfidence = avgResult[0]?.avg || 0;
1845
- const recentResult = await this.db.all(
1979
+ const recentResult = await dbAll(
1980
+ this.db,
1846
1981
  `SELECT COUNT(*) as count FROM consolidated_memories
1847
1982
  WHERE created_at > datetime('now', '-24 hours')`
1848
1983
  );
@@ -1866,7 +2001,8 @@ var ConsolidatedStore = class {
1866
2001
  */
1867
2002
  async isAlreadyConsolidated(eventIds) {
1868
2003
  for (const eventId of eventIds) {
1869
- const result = await this.db.all(
2004
+ const result = await dbAll(
2005
+ this.db,
1870
2006
  `SELECT COUNT(*) as count FROM consolidated_memories
1871
2007
  WHERE source_events LIKE ?`,
1872
2008
  [`%"${eventId}"%`]
@@ -1880,7 +2016,8 @@ var ConsolidatedStore = class {
1880
2016
  * Get the last consolidation time
1881
2017
  */
1882
2018
  async getLastConsolidationTime() {
1883
- const result = await this.db.all(
2019
+ const result = await dbAll(
2020
+ this.db,
1884
2021
  `SELECT created_at FROM consolidated_memories
1885
2022
  ORDER BY created_at DESC
1886
2023
  LIMIT 1`
@@ -1899,8 +2036,8 @@ var ConsolidatedStore = class {
1899
2036
  topics: JSON.parse(row.topics || "[]"),
1900
2037
  sourceEvents: JSON.parse(row.source_events || "[]"),
1901
2038
  confidence: row.confidence,
1902
- createdAt: new Date(row.created_at),
1903
- accessedAt: row.accessed_at ? new Date(row.accessed_at) : void 0,
2039
+ createdAt: toDate(row.created_at),
2040
+ accessedAt: row.accessed_at ? toDate(row.accessed_at) : void 0,
1904
2041
  accessCount: row.access_count || 0
1905
2042
  };
1906
2043
  }
@@ -2266,7 +2403,8 @@ var ContinuityManager = class {
2266
2403
  * Get recent continuity logs
2267
2404
  */
2268
2405
  async getRecentLogs(limit = 10) {
2269
- const rows = await this.db.all(
2406
+ const rows = await dbAll(
2407
+ this.db,
2270
2408
  `SELECT * FROM continuity_log
2271
2409
  ORDER BY created_at DESC
2272
2410
  LIMIT ?`,
@@ -2278,14 +2416,15 @@ var ContinuityManager = class {
2278
2416
  toContextId: row.to_context_id,
2279
2417
  continuityScore: row.continuity_score,
2280
2418
  transitionType: row.transition_type,
2281
- createdAt: new Date(row.created_at)
2419
+ createdAt: toDate(row.created_at)
2282
2420
  }));
2283
2421
  }
2284
2422
  /**
2285
2423
  * Get average continuity score over time period
2286
2424
  */
2287
2425
  async getAverageScore(hours = 1) {
2288
- const result = await this.db.all(
2426
+ const result = await dbAll(
2427
+ this.db,
2289
2428
  `SELECT AVG(continuity_score) as avg_score
2290
2429
  FROM continuity_log
2291
2430
  WHERE created_at > datetime('now', '-${hours} hours')`
@@ -2296,7 +2435,8 @@ var ContinuityManager = class {
2296
2435
  * Get transition type distribution
2297
2436
  */
2298
2437
  async getTransitionStats(hours = 24) {
2299
- const rows = await this.db.all(
2438
+ const rows = await dbAll(
2439
+ this.db,
2300
2440
  `SELECT transition_type, COUNT(*) as count
2301
2441
  FROM continuity_log
2302
2442
  WHERE created_at > datetime('now', '-${hours} hours')
@@ -2316,7 +2456,8 @@ var ContinuityManager = class {
2316
2456
  * Clear old continuity logs
2317
2457
  */
2318
2458
  async cleanup(olderThanDays = 7) {
2319
- const result = await this.db.all(
2459
+ const result = await dbAll(
2460
+ this.db,
2320
2461
  `DELETE FROM continuity_log
2321
2462
  WHERE created_at < datetime('now', '-${olderThanDays} days')
2322
2463
  RETURNING COUNT(*) as changes`
@@ -2351,7 +2492,8 @@ var ContinuityManager = class {
2351
2492
  * Log a context transition
2352
2493
  */
2353
2494
  async logTransition(current, previous, score, type) {
2354
- await this.db.run(
2495
+ await dbRun(
2496
+ this.db,
2355
2497
  `INSERT INTO continuity_log
2356
2498
  (log_id, from_context_id, to_context_id, continuity_score, transition_type, created_at)
2357
2499
  VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)`,
@@ -2464,6 +2606,63 @@ function createContinuityManager(eventStore, config) {
2464
2606
  }
2465
2607
 
2466
2608
  // src/services/memory-service.ts
2609
+ function normalizePath(projectPath) {
2610
+ const expanded = projectPath.startsWith("~") ? path.join(os.homedir(), projectPath.slice(1)) : projectPath;
2611
+ try {
2612
+ return fs.realpathSync(expanded);
2613
+ } catch {
2614
+ return path.resolve(expanded);
2615
+ }
2616
+ }
2617
+ function hashProjectPath(projectPath) {
2618
+ const normalizedPath = normalizePath(projectPath);
2619
+ return crypto2.createHash("sha256").update(normalizedPath).digest("hex").slice(0, 8);
2620
+ }
2621
+ function getProjectStoragePath(projectPath) {
2622
+ const hash = hashProjectPath(projectPath);
2623
+ return path.join(os.homedir(), ".claude-code", "memory", "projects", hash);
2624
+ }
2625
+ var REGISTRY_PATH = path.join(os.homedir(), ".claude-code", "memory", "session-registry.json");
2626
+ function loadSessionRegistry() {
2627
+ try {
2628
+ if (fs.existsSync(REGISTRY_PATH)) {
2629
+ const data = fs.readFileSync(REGISTRY_PATH, "utf-8");
2630
+ return JSON.parse(data);
2631
+ }
2632
+ } catch (error) {
2633
+ console.error("Failed to load session registry:", error);
2634
+ }
2635
+ return { version: 1, sessions: {} };
2636
+ }
2637
+ function saveSessionRegistry(registry) {
2638
+ const dir = path.dirname(REGISTRY_PATH);
2639
+ if (!fs.existsSync(dir)) {
2640
+ fs.mkdirSync(dir, { recursive: true });
2641
+ }
2642
+ const tempPath = REGISTRY_PATH + ".tmp";
2643
+ fs.writeFileSync(tempPath, JSON.stringify(registry, null, 2));
2644
+ fs.renameSync(tempPath, REGISTRY_PATH);
2645
+ }
2646
+ function registerSession(sessionId, projectPath) {
2647
+ const registry = loadSessionRegistry();
2648
+ registry.sessions[sessionId] = {
2649
+ projectPath: normalizePath(projectPath),
2650
+ projectHash: hashProjectPath(projectPath),
2651
+ registeredAt: (/* @__PURE__ */ new Date()).toISOString()
2652
+ };
2653
+ const entries = Object.entries(registry.sessions);
2654
+ if (entries.length > 1e3) {
2655
+ const sorted = entries.sort(
2656
+ (a, b) => new Date(b[1].registeredAt).getTime() - new Date(a[1].registeredAt).getTime()
2657
+ );
2658
+ registry.sessions = Object.fromEntries(sorted.slice(0, 1e3));
2659
+ }
2660
+ saveSessionRegistry(registry);
2661
+ }
2662
+ function getSessionProject(sessionId) {
2663
+ const registry = loadSessionRegistry();
2664
+ return registry.sessions[sessionId] || null;
2665
+ }
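The helpers above keep a single JSON registry at ~/.claude-code/memory/session-registry.json, written atomically via a .tmp file and rename, and pruned to the 1000 most recently registered sessions. Its shape, as implied by loadSessionRegistry and registerSession (the interface names below are illustrative, not exported by the package):

    interface SessionRegistryEntry {
      projectPath: string;  // normalized, symlink-resolved absolute path
      projectHash: string;  // first 8 hex chars of sha256(projectPath)
      registeredAt: string; // ISO timestamp, used to prune the oldest entries
    }

    interface SessionRegistry {
      version: number;                                // currently 1
      sessions: Record<string, SessionRegistryEntry>; // keyed by session id
    }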
2467
2666
  var MemoryService = class {
2468
2667
  eventStore;
2469
2668
  vectorStore;
@@ -2911,14 +3110,30 @@ var MemoryService = class {
2911
3110
  return p;
2912
3111
  }
2913
3112
  };
2914
- var defaultService = null;
3113
+ var serviceCache = /* @__PURE__ */ new Map();
3114
+ var GLOBAL_KEY = "__global__";
2915
3115
  function getDefaultMemoryService() {
2916
- if (!defaultService) {
2917
- defaultService = new MemoryService({
3116
+ if (!serviceCache.has(GLOBAL_KEY)) {
3117
+ serviceCache.set(GLOBAL_KEY, new MemoryService({
2918
3118
  storagePath: "~/.claude-code/memory"
2919
- });
3119
+ }));
3120
+ }
3121
+ return serviceCache.get(GLOBAL_KEY);
3122
+ }
3123
+ function getMemoryServiceForProject(projectPath) {
3124
+ const hash = hashProjectPath(projectPath);
3125
+ if (!serviceCache.has(hash)) {
3126
+ const storagePath = getProjectStoragePath(projectPath);
3127
+ serviceCache.set(hash, new MemoryService({ storagePath }));
3128
+ }
3129
+ return serviceCache.get(hash);
3130
+ }
3131
+ function getMemoryServiceForSession(sessionId) {
3132
+ const projectInfo = getSessionProject(sessionId);
3133
+ if (projectInfo) {
3134
+ return getMemoryServiceForProject(projectInfo.projectPath);
2920
3135
  }
2921
- return defaultService;
3136
+ return getDefaultMemoryService();
2922
3137
  }
2923
3138
  function createMemoryService(config) {
2924
3139
  return new MemoryService(config);
@@ -2926,6 +3141,12 @@ function createMemoryService(config) {
2926
3141
  export {
2927
3142
  MemoryService,
2928
3143
  createMemoryService,
2929
- getDefaultMemoryService
3144
+ getDefaultMemoryService,
3145
+ getMemoryServiceForProject,
3146
+ getMemoryServiceForSession,
3147
+ getProjectStoragePath,
3148
+ getSessionProject,
3149
+ hashProjectPath,
3150
+ registerSession
2930
3151
  };
2931
3152
  //# sourceMappingURL=memory-service.js.map
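Taken together, the new exports let a hook bind a session to a project once and then resolve the matching per-project store on later events, falling back to the global ~/.claude-code/memory store for unregistered sessions. A usage sketch under those assumptions (the import specifier and hook wiring are illustrative; whether this deep path is importable depends on the package's exports map):

    import {
      registerSession,
      getMemoryServiceForSession,
      getProjectStoragePath,
    } from "claude-memory-layer/dist/services/memory-service.js";

    // On session start: remember which project this session belongs to.
    registerSession("session-123", "~/projects/my-app");
    // Events for this project are stored under
    // ~/.claude-code/memory/projects/<8-char sha256 prefix of the normalized path>.
    console.log(getProjectStoragePath("~/projects/my-app"));

    // On later hook events: resolve the per-project service for that session.
    const memory = getMemoryServiceForSession("session-123");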