@onenomad/engram-mcp 1.0.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,520 +1,520 @@
1
- import { DEFAULT_CONFIG } from './types.js';
2
- import { cosineSimilarity, getEdgeTargetIds, addEdge } from './utils.js';
3
- import { consolidateEpisodic } from './episodic-consolidator.js';
4
- const SCRATCH_TTL_MS = 24 * 60 * 60 * 1000;
5
- /**
6
- * Background consolidation pass: links, decays, promotes, demotes, and merges memories.
7
- * Run this periodically (e.g., daily or at session start).
8
- */
9
- export async function consolidate(storage, config) {
10
- const cfg = config ?? DEFAULT_CONFIG;
11
- const stats = {
12
- linked: 0, decayed: 0, promoted: 0, demoted: 0,
13
- reactivated: 0, dailyMoved: 0, merged: 0,
14
- episodicClustered: 0, episodicSummarized: 0,
15
- selfOrganized: 0, scratchPurged: 0,
16
- };
17
- let chunks = await storage.listChunks();
18
- // Scratch tier: session-only memories. Purge expired ones first, then
19
- // exclude live scratch from all other consolidation paths so they never
20
- // get linked, decayed, merged, or promoted.
21
- stats.scratchPurged = await purgeExpiredScratch(storage, chunks);
22
- chunks = chunks.filter(c => c.tier !== 'scratch');
23
- // Biased replay: prioritize chunks by importance * recency * surprise
24
- if (cfg.enableBiasedReplay) {
25
- chunks = sortByReplayPriority(chunks);
26
- }
27
- stats.dailyMoved = await processDailyTier(storage, chunks);
28
- stats.promoted = await promoteChunks(storage, chunks);
29
- stats.demoted = await demoteToArchive(storage, chunks);
30
- stats.reactivated = await reactivateArchived(storage, chunks);
31
- stats.linked = await linkRelated(storage, chunks);
32
- stats.decayed = cfg.enableFSRS
33
- ? await decayFSRS(storage, chunks) + await decayIrrelevant(storage, chunks)
34
- : await decayImportance(storage, chunks) + await decayIrrelevant(storage, chunks);
35
- stats.merged = await mergeNearDuplicates(storage, chunks);
36
- // Self-organizing memories: auto-describe undescribed memories and generate cross-links
37
- stats.selfOrganized = await selfOrganize(storage, chunks);
38
- // Episodic-to-semantic consolidation (Improvement 8)
39
- if (cfg.enableEpisodicConsolidation) {
40
- const episodic = await consolidateEpisodic(cfg, storage);
41
- stats.episodicClustered = episodic.clustered;
42
- stats.episodicSummarized = episodic.summarized;
43
- }
44
- return stats;
45
- }
46
- // ── Scratch tier: session-only memories ──────────────────────────────
47
- // Created by callers who want to keep something in the working set without
48
- // it being treated as canonical knowledge. Purged after SCRATCH_TTL_MS unless
49
- // promoted explicitly via memory_scratch_promote.
50
- async function purgeExpiredScratch(storage, chunks) {
51
- let purged = 0;
52
- const now = Date.now();
53
- for (const chunk of chunks) {
54
- if (chunk.tier !== 'scratch')
55
- continue;
56
- if (now - new Date(chunk.createdAt).getTime() >= SCRATCH_TTL_MS) {
57
- await storage.deleteChunk(chunk.id);
58
- purged++;
59
- }
60
- }
61
- return purged;
62
- }
63
- // ── Daily → Short-term ───────────────────────────────────────────────
64
- async function processDailyTier(storage, chunks) {
65
- let moved = 0;
66
- const now = Date.now();
67
- const retentionMs = 2 * 86_400_000;
68
- for (const chunk of chunks) {
69
- if (chunk.tier !== 'daily')
70
- continue;
71
- if (now - new Date(chunk.createdAt).getTime() >= retentionMs) {
72
- if (chunk.importance >= 0.3 || chunk.recallCount > 0) {
73
- await storage.updateChunk(chunk.id, { tier: 'short-term' });
74
- moved++;
75
- }
76
- }
77
- }
78
- return moved;
79
- }
80
- // ── Short-term → Long-term ───────────────────────────────────────────
81
- async function promoteChunks(storage, chunks) {
82
- let promoted = 0;
83
- for (const chunk of chunks) {
84
- if (chunk.tier !== 'short-term')
85
- continue;
86
- const ageDays = daysSince(chunk.createdAt);
87
- const lastRecalledDays = chunk.lastRecalledAt ? daysSince(chunk.lastRecalledAt) : Infinity;
88
- const helpfulCount = chunk.recallOutcomes.filter(o => o.outcome === 'helpful').length;
89
- const shouldPromote = chunk.importance >= 0.8 ||
90
- (chunk.recallCount >= 3 && ageDays >= 7) ||
91
- (helpfulCount >= 2 && lastRecalledDays < 7) ||
92
- (chunk.cognitiveLayer === 'procedural' && chunk.importance >= 0.5) ||
93
- (chunk.recallCount >= 1 && ageDays >= 30 && lastRecalledDays < 7);
94
- if (shouldPromote) {
95
- await storage.updateChunk(chunk.id, { tier: 'long-term' });
96
- promoted++;
97
- }
98
- }
99
- return promoted;
100
- }
101
- // ── Long-term → Archive ──────────────────────────────────────────────
102
- async function demoteToArchive(storage, chunks) {
103
- let demoted = 0;
104
- const now = Date.now();
105
- for (const chunk of chunks) {
106
- if (chunk.tier !== 'long-term')
107
- continue;
108
- // User-asserted memories are user-territory: never auto-archive.
109
- // The user can explicitly delete or update_metadata to retire them.
110
- if (chunk.origin === 'user')
111
- continue;
112
- const ageMs = now - new Date(chunk.createdAt).getTime();
113
- const lastRecallMs = chunk.lastRecalledAt
114
- ? now - new Date(chunk.lastRecalledAt).getTime()
115
- : Infinity;
116
- const tooOld = ageMs >= 90 * 86_400_000;
117
- const inactive = lastRecallMs >= 30 * 86_400_000;
118
- if (tooOld || (inactive && chunk.importance < 0.3)) {
119
- await storage.updateChunk(chunk.id, { tier: 'archive' });
120
- demoted++;
121
- }
122
- }
123
- return demoted;
124
- }
125
- // ── Archive → Long-term (reactivation) ───────────────────────────────
126
- async function reactivateArchived(storage, chunks) {
127
- let reactivated = 0;
128
- const now = Date.now();
129
- for (const chunk of chunks) {
130
- if (chunk.tier !== 'archive' || !chunk.lastRecalledAt)
131
- continue;
132
- if (now - new Date(chunk.lastRecalledAt).getTime() < 7 * 86_400_000) {
133
- await storage.updateChunk(chunk.id, { tier: 'long-term' });
134
- reactivated++;
135
- }
136
- }
137
- return reactivated;
138
- }
139
- // ── Tag-based Linking ────────────────────────────────────────────────
140
- async function linkRelated(storage, chunks) {
141
- let linked = 0;
142
- const tagIndex = new Map();
143
- const chunkMap = new Map();
144
- for (const chunk of chunks) {
145
- if (chunk.tier === 'archive')
146
- continue;
147
- chunkMap.set(chunk.id, chunk);
148
- for (const tag of chunk.tags) {
149
- const ids = tagIndex.get(tag) ?? [];
150
- ids.push(chunk.id);
151
- tagIndex.set(tag, ids);
152
- }
153
- }
154
- const seen = new Set();
155
- for (const [, ids] of tagIndex) {
156
- if (ids.length < 2 || ids.length > 50)
157
- continue;
158
- for (let i = 0; i < ids.length; i++) {
159
- for (let j = i + 1; j < ids.length; j++) {
160
- const a = chunkMap.get(ids[i]);
161
- const b = chunkMap.get(ids[j]);
162
- const key = a.id < b.id ? `${a.id}|${b.id}` : `${b.id}|${a.id}`;
163
- if (seen.has(key))
164
- continue;
165
- seen.add(key);
166
- if (getEdgeTargetIds(a.relatedMemories).includes(b.id))
167
- continue;
168
- const overlap = a.tags.filter(t => b.tags.includes(t));
169
- if (overlap.length >= 2 || (overlap.length >= 1 && a.cognitiveLayer === b.cognitiveLayer)) {
170
- const rel = a.source === b.source ? 'temporal' : 'semantic';
171
- const weight = Math.min(1.0, 0.3 + overlap.length * 0.15);
172
- const aEdges = addEdge(a.relatedMemories, b.id, rel, weight);
173
- const bEdges = addEdge(b.relatedMemories, a.id, rel, weight);
174
- await storage.updateChunk(a.id, { relatedMemories: aEdges });
175
- await storage.updateChunk(b.id, { relatedMemories: bEdges });
176
- a.relatedMemories = aEdges;
177
- b.relatedMemories = bEdges;
178
- linked++;
179
- }
180
- }
181
- }
182
- }
183
- return linked;
184
- }
185
- // ── Importance Decay ─────────────────────────────────────────────────
186
- async function decayImportance(storage, chunks) {
187
- let decayed = 0;
188
- const now = Date.now();
189
- const rates = { procedural: 0.98, semantic: 0.97, episodic: 0.95 };
190
- const floors = { procedural: 0.15, semantic: 0.10, episodic: 0.05 };
191
- for (const chunk of chunks) {
192
- if (chunk.tier === 'archive')
193
- continue;
194
- const lastTouch = chunk.lastRecalledAt
195
- ? new Date(chunk.lastRecalledAt).getTime()
196
- : new Date(chunk.createdAt).getTime();
197
- const daysSinceTouch = (now - lastTouch) / 86_400_000;
198
- if (daysSinceTouch < 7)
199
- continue;
200
- const rate = rates[chunk.cognitiveLayer] ?? 0.97;
201
- const floor = floors[chunk.cognitiveLayer] ?? 0.10;
202
- const weeks = daysSinceTouch / 7;
203
- const newImportance = Math.max(floor, chunk.importance * Math.pow(rate, weeks));
204
- if (Math.abs(newImportance - chunk.importance) > 0.01) {
205
- await storage.updateChunk(chunk.id, { importance: newImportance });
206
- decayed++;
207
- }
208
- }
209
- return decayed;
210
- }
211
- // ── Decay Irrelevant ─────────────────────────────────────────────────
212
- async function decayIrrelevant(storage, chunks) {
213
- let decayed = 0;
214
- for (const chunk of chunks) {
215
- const outcomes = chunk.recallOutcomes;
216
- if (outcomes.length < 3)
217
- continue;
218
- const recent = outcomes.slice(-5);
219
- const irrelevant = recent.filter(o => o.outcome === 'irrelevant').length;
220
- if (irrelevant >= 3) {
221
- const newImportance = Math.max(0.05, chunk.importance - 0.2);
222
- await storage.updateChunk(chunk.id, { importance: newImportance });
223
- // Never auto-archive user-asserted memories on recall-outcome decay —
224
- // the user explicitly saved them; "irrelevant for this query" doesn't
225
- // mean retire.
226
- if (newImportance <= 0.1 && chunk.origin !== 'user') {
227
- await storage.updateChunk(chunk.id, { tier: 'archive' });
228
- }
229
- decayed++;
230
- }
231
- }
232
- return decayed;
233
- }
234
- // ── Near-duplicate Merging ───────────────────────────────────────────
235
- async function mergeNearDuplicates(storage, chunks) {
236
- let merged = 0;
237
- const consumed = new Set();
238
- const candidates = chunks
239
- .filter(c => c.cognitiveLayer === 'semantic' && c.embedding && c.embedding.length > 0)
240
- .sort((a, b) => new Date(b.lastRecalledAt ?? b.createdAt).getTime() -
241
- new Date(a.lastRecalledAt ?? a.createdAt).getTime())
242
- .slice(0, 200);
243
- for (const chunk of candidates) {
244
- if (consumed.has(chunk.id))
245
- continue;
246
- for (const other of candidates) {
247
- if (other.id === chunk.id || consumed.has(other.id))
248
- continue;
249
- if (!other.embedding || !chunk.embedding)
250
- continue;
251
- if (cosineSimilarity(chunk.embedding, other.embedding) > 0.9) {
252
- // Pick keeper by importance, but a user-origin memory always wins
253
- // over a derived one — user-asserted text is canonical, never
254
- // displaced by an auto-extracted near-duplicate.
255
- let keeper;
256
- let loser;
257
- if (chunk.origin === 'user' && other.origin !== 'user') {
258
- keeper = chunk;
259
- loser = other;
260
- }
261
- else if (other.origin === 'user' && chunk.origin !== 'user') {
262
- keeper = other;
263
- loser = chunk;
264
- }
265
- else if (chunk.origin === 'user' && other.origin === 'user') {
266
- // Both user-asserted near-duplicates: leave both. Merging would
267
- // delete content the user explicitly wrote.
268
- continue;
269
- }
270
- else {
271
- keeper = chunk.importance >= other.importance ? chunk : other;
272
- loser = keeper === chunk ? other : chunk;
273
- }
274
- await storage.updateChunk(keeper.id, {
275
- recallCount: keeper.recallCount + loser.recallCount,
276
- importance: Math.min(1.0, keeper.importance + 0.03),
277
- });
278
- await storage.deleteChunk(loser.id);
279
- consumed.add(loser.id);
280
- merged++;
281
- }
282
- }
283
- }
284
- return merged;
285
- }
286
- // ── FSRS Decay (Improvement 3) + Adaptive Forgetting ──────────────
287
- // Free Spaced Repetition Scheduler: R = (1 + t/(9*S))^(-1)
288
- // More principled than exponential decay. Stability grows with successful recall.
289
- //
290
- // Adaptive forgetting (FadeMem-inspired): semantic proximity to recently
291
- // active memories modulates decay rate. Memories near active clusters
292
- // decay slower (proximity shield), isolated memories decay faster.
293
- function fsrsRetrievability(t, S) {
294
- return Math.pow(1 + t / (9 * S), -1);
295
- }
296
- /**
297
- * Collect recently active memories (recalled or ingested in the last N days)
298
- * to serve as "active cluster" exemplars for adaptive forgetting.
299
- */
300
- function getActiveExemplars(chunks, windowDays = 7) {
301
- const now = Date.now();
302
- const cutoff = now - windowDays * 86_400_000;
303
- return chunks
304
- .filter(c => {
305
- if (c.tier === 'archive')
306
- return false;
307
- const lastActive = c.lastRecalledAt
308
- ? Math.max(new Date(c.lastRecalledAt).getTime(), new Date(c.createdAt).getTime())
309
- : new Date(c.createdAt).getTime();
310
- return lastActive >= cutoff;
311
- })
312
- .filter(c => c.embedding && c.embedding.length > 0)
313
- .sort((a, b) => {
314
- const aTime = new Date(a.lastRecalledAt ?? a.createdAt).getTime();
315
- const bTime = new Date(b.lastRecalledAt ?? b.createdAt).getTime();
316
- return bTime - aTime;
317
- })
318
- .slice(0, 10); // Keep top 10 most recent as exemplars
319
- }
320
- /**
321
- * Compute max cosine similarity between a chunk and active exemplars.
322
- * Returns 0 if no embedding available — falls back to tag overlap.
323
- */
324
- function computeProximityFactor(chunk, exemplars) {
325
- if (exemplars.length === 0)
326
- return 0;
327
- // Embedding-based proximity
328
- if (chunk.embedding && chunk.embedding.length > 0) {
329
- let maxSim = 0;
330
- for (const ex of exemplars) {
331
- if (!ex.embedding || ex.embedding.length !== chunk.embedding.length)
332
- continue;
333
- const sim = cosineSimilarity(chunk.embedding, ex.embedding);
334
- if (sim > maxSim)
335
- maxSim = sim;
336
- }
337
- return maxSim;
338
- }
339
- // Tag-based fallback: Jaccard similarity between tags
340
- if (chunk.tags.length === 0)
341
- return 0;
342
- const chunkTags = new Set(chunk.tags);
343
- let maxJaccard = 0;
344
- for (const ex of exemplars) {
345
- if (ex.tags.length === 0)
346
- continue;
347
- const exTags = new Set(ex.tags);
348
- let intersection = 0;
349
- for (const t of chunkTags)
350
- if (exTags.has(t))
351
- intersection++;
352
- const union = new Set([...chunkTags, ...exTags]).size;
353
- const jaccard = union > 0 ? intersection / union : 0;
354
- if (jaccard > maxJaccard)
355
- maxJaccard = jaccard;
356
- }
357
- return maxJaccard;
358
- }
359
- async function decayFSRS(storage, chunks) {
360
- let decayed = 0;
361
- const now = Date.now();
362
- const floors = { procedural: 0.15, semantic: 0.10, episodic: 0.05 };
363
- // Adaptive forgetting: compute active exemplars once
364
- const exemplars = getActiveExemplars(chunks);
365
- for (const chunk of chunks) {
366
- if (chunk.tier === 'archive')
367
- continue;
368
- const lastTouch = chunk.lastRecalledAt
369
- ? new Date(chunk.lastRecalledAt).getTime()
370
- : new Date(chunk.createdAt).getTime();
371
- const daysSinceTouch = (now - lastTouch) / 86_400_000;
372
- if (daysSinceTouch < 3)
373
- continue;
374
- const baseStability = chunk.stability ?? 1.0;
375
- // Adaptive forgetting: proximity to active memories modulates stability
376
- const proximity = computeProximityFactor(chunk, exemplars);
377
- let effectiveStability;
378
- if (proximity > 0.5) {
379
- // Proximate memories: stability boosted by up to 3x
380
- effectiveStability = baseStability * (1 + proximity * 2);
381
- }
382
- else if (proximity < 0.2) {
383
- // Isolated memories: accelerated decay (70% stability)
384
- effectiveStability = baseStability * 0.7;
385
- }
386
- else {
387
- // Moderate proximity: normal decay
388
- effectiveStability = baseStability;
389
- }
390
- const floor = floors[chunk.cognitiveLayer] ?? 0.10;
391
- const retrievability = fsrsRetrievability(daysSinceTouch, effectiveStability);
392
- const newImportance = Math.max(floor, chunk.importance * retrievability);
393
- if (Math.abs(newImportance - chunk.importance) > 0.01) {
394
- await storage.updateChunk(chunk.id, { importance: newImportance });
395
- decayed++;
396
- }
397
- }
398
- return decayed;
399
- }
400
- /**
401
- * Update FSRS stability after a recall outcome.
402
- * Call this from outcome.ts when a memory is recalled.
403
- */
404
- export function computeFSRSUpdate(chunk, outcome) {
405
- const S = chunk.stability ?? 1.0;
406
- const D = chunk.difficulty ?? 0.3;
407
- switch (outcome) {
408
- case 'helpful': {
409
- // Stability growth: easier memories grow faster
410
- const growth = S * (1 + Math.exp(0.1) * (11 - D * 10) * Math.pow(S, -0.2));
411
- return { stability: Math.min(365, growth), difficulty: Math.max(0, D - 0.02) };
412
- }
413
- case 'corrected': {
414
- // Halve stability, increase difficulty
415
- return { stability: Math.max(0.5, S * 0.5), difficulty: Math.min(1.0, D + 0.1) };
416
- }
417
- case 'irrelevant': {
418
- // Reduce stability, slight difficulty increase
419
- return { stability: Math.max(0.5, S * 0.8), difficulty: Math.min(1.0, D + 0.05) };
420
- }
421
- }
422
- }
423
- // ── Biased Replay (Improvement 5) ──────────────────────────────────
424
- // Prioritize consolidation by importance * recency * surprise.
425
- // High-surprise memories (corrections, contradictions) get extra attention.
426
- function sortByReplayPriority(chunks) {
427
- const now = Date.now();
428
- return [...chunks].sort((a, b) => {
429
- const priorityA = replayPriority(a, now);
430
- const priorityB = replayPriority(b, now);
431
- return priorityB - priorityA;
432
- });
433
- }
434
- function replayPriority(chunk, now) {
435
- const lastTouch = chunk.lastRecalledAt
436
- ? new Date(chunk.lastRecalledAt).getTime()
437
- : new Date(chunk.createdAt).getTime();
438
- const recencyDays = (now - lastTouch) / 86_400_000;
439
- const recency = Math.exp(-recencyDays / 30); // Half-life ~30 days
440
- const importance = chunk.importance;
441
- // Surprise: corrections and corrected outcomes score higher
442
- let surprise = 0;
443
- if (chunk.type === 'correction')
444
- surprise += 0.5;
445
- const recentOutcomes = chunk.recallOutcomes.slice(-5);
446
- const correctedCount = recentOutcomes.filter(o => o.outcome === 'corrected').length;
447
- surprise += correctedCount * 0.2;
448
- return importance * recency * (1 + surprise);
449
- }
450
- // ── Helpers ──────────────────────────────────────────────────────────
451
- // ── Self-Organizing Memories ─────────────────────────────────────────
452
- // Auto-generate domain/topic for memories that lack them, and create
453
- // cross-links between semantically related memories that share no tags.
454
- // Runs during consolidation — no new MCP tools needed.
455
- async function selfOrganize(storage, chunks) {
456
- let organized = 0;
457
- // Phase 1: Auto-describe — infer domain/topic from content heuristics
458
- const undescribed = chunks.filter(c => c.tier !== 'archive' && (!c.domain || !c.topic));
459
- for (const chunk of undescribed.slice(0, 50)) {
460
- const updates = {};
461
- const lower = chunk.content.toLowerCase();
462
- // Domain inference from keywords
463
- if (!chunk.domain) {
464
- const domainSignals = {
465
- code: ['function', 'import', 'export', 'typescript', 'react', 'component', 'api', 'endpoint', 'bug', 'refactor', 'deploy'],
466
- design: ['figma', 'layout', 'color', 'font', 'responsive', 'ui', 'ux', 'component', 'tailwind'],
467
- infrastructure: ['vercel', 'docker', 'ci/cd', 'deploy', 'database', 'postgres', 'env', 'server'],
468
- business: ['pricing', 'customer', 'marketing', 'revenue', 'competitor', 'launch', 'user'],
469
- personal: ['prefer', 'like', 'dislike', 'always', 'never', 'habit', 'schedule'],
470
- };
471
- for (const [domain, keywords] of Object.entries(domainSignals)) {
472
- const hits = keywords.filter(k => lower.includes(k)).length;
473
- if (hits >= 2) {
474
- updates.domain = domain;
475
- break;
476
- }
477
- }
478
- }
479
- // Topic inference from tags or content
480
- if (!chunk.topic && chunk.tags.length > 0) {
481
- // Use the most specific tag as topic
482
- updates.topic = chunk.tags
483
- .filter(t => t.length > 2 && !['general', 'misc', 'other'].includes(t))
484
- .sort((a, b) => b.length - a.length)[0];
485
- }
486
- if (Object.keys(updates).length > 0) {
487
- await storage.updateChunk(chunk.id, updates);
488
- organized++;
489
- }
490
- }
491
- // Phase 2: Cross-link — find semantically similar memories with no existing edges
492
- const candidates = chunks
493
- .filter(c => c.tier !== 'archive' && c.embedding && c.embedding.length > 0)
494
- .slice(0, 100);
495
- for (let i = 0; i < candidates.length; i++) {
496
- const a = candidates[i];
497
- const existingTargets = new Set(getEdgeTargetIds(a.relatedMemories));
498
- for (let j = i + 1; j < candidates.length; j++) {
499
- const b = candidates[j];
500
- if (existingTargets.has(b.id))
501
- continue;
502
- const sim = cosineSimilarity(a.embedding, b.embedding);
503
- if (sim > 0.7 && sim < 0.9) { // Related but not duplicate
504
- const weight = Math.min(1.0, sim);
505
- const aEdges = addEdge(a.relatedMemories, b.id, 'semantic', weight);
506
- const bEdges = addEdge(b.relatedMemories, a.id, 'semantic', weight);
507
- await storage.updateChunk(a.id, { relatedMemories: aEdges });
508
- await storage.updateChunk(b.id, { relatedMemories: bEdges });
509
- a.relatedMemories = aEdges;
510
- b.relatedMemories = bEdges;
511
- organized++;
512
- }
513
- }
514
- }
515
- return organized;
516
- }
517
- function daysSince(dateStr) {
518
- return (Date.now() - new Date(dateStr).getTime()) / 86_400_000;
519
- }
1
+ import { DEFAULT_CONFIG } from './types.js';
2
+ import { cosineSimilarity, getEdgeTargetIds, addEdge } from './utils.js';
3
+ import { consolidateEpisodic } from './episodic-consolidator.js';
4
+ const SCRATCH_TTL_MS = 24 * 60 * 60 * 1000;
5
+ /**
6
+ * Background consolidation pass: links, decays, promotes, demotes, and merges memories.
7
+ * Run this periodically (e.g., daily or at session start).
8
+ */
9
+ export async function consolidate(storage, config) {
10
+ const cfg = config ?? DEFAULT_CONFIG;
11
+ const stats = {
12
+ linked: 0, decayed: 0, promoted: 0, demoted: 0,
13
+ reactivated: 0, dailyMoved: 0, merged: 0,
14
+ episodicClustered: 0, episodicSummarized: 0,
15
+ selfOrganized: 0, scratchPurged: 0,
16
+ };
17
+ let chunks = await storage.listChunks();
18
+ // Scratch tier: session-only memories. Purge expired ones first, then
19
+ // exclude live scratch from all other consolidation paths so they never
20
+ // get linked, decayed, merged, or promoted.
21
+ stats.scratchPurged = await purgeExpiredScratch(storage, chunks);
22
+ chunks = chunks.filter(c => c.tier !== 'scratch');
23
+ // Biased replay: prioritize chunks by importance * recency * surprise
24
+ if (cfg.enableBiasedReplay) {
25
+ chunks = sortByReplayPriority(chunks);
26
+ }
27
+ stats.dailyMoved = await processDailyTier(storage, chunks);
28
+ stats.promoted = await promoteChunks(storage, chunks);
29
+ stats.demoted = await demoteToArchive(storage, chunks);
30
+ stats.reactivated = await reactivateArchived(storage, chunks);
31
+ stats.linked = await linkRelated(storage, chunks);
32
+ stats.decayed = cfg.enableFSRS
33
+ ? await decayFSRS(storage, chunks) + await decayIrrelevant(storage, chunks)
34
+ : await decayImportance(storage, chunks) + await decayIrrelevant(storage, chunks);
35
+ stats.merged = await mergeNearDuplicates(storage, chunks);
36
+ // Self-organizing memories: auto-describe undescribed memories and generate cross-links
37
+ stats.selfOrganized = await selfOrganize(storage, chunks);
38
+ // Episodic-to-semantic consolidation (Improvement 8)
39
+ if (cfg.enableEpisodicConsolidation) {
40
+ const episodic = await consolidateEpisodic(cfg, storage);
41
+ stats.episodicClustered = episodic.clustered;
42
+ stats.episodicSummarized = episodic.summarized;
43
+ }
44
+ return stats;
45
+ }
46
+ // ── Scratch tier: session-only memories ──────────────────────────────
47
+ // Created by callers who want to keep something in the working set without
48
+ // it being treated as canonical knowledge. Purged after SCRATCH_TTL_MS unless
49
+ // promoted explicitly via engram-scratch-promote.
50
+ async function purgeExpiredScratch(storage, chunks) {
51
+ let purged = 0;
52
+ const now = Date.now();
53
+ for (const chunk of chunks) {
54
+ if (chunk.tier !== 'scratch')
55
+ continue;
56
+ if (now - new Date(chunk.createdAt).getTime() >= SCRATCH_TTL_MS) {
57
+ await storage.deleteChunk(chunk.id);
58
+ purged++;
59
+ }
60
+ }
61
+ return purged;
62
+ }
63
+ // ── Daily → Short-term ───────────────────────────────────────────────
64
+ async function processDailyTier(storage, chunks) {
65
+ let moved = 0;
66
+ const now = Date.now();
67
+ const retentionMs = 2 * 86_400_000;
68
+ for (const chunk of chunks) {
69
+ if (chunk.tier !== 'daily')
70
+ continue;
71
+ if (now - new Date(chunk.createdAt).getTime() >= retentionMs) {
72
+ if (chunk.importance >= 0.3 || chunk.recallCount > 0) {
73
+ await storage.updateChunk(chunk.id, { tier: 'short-term' });
74
+ moved++;
75
+ }
76
+ }
77
+ }
78
+ return moved;
79
+ }
80
+ // ── Short-term → Long-term ───────────────────────────────────────────
81
+ async function promoteChunks(storage, chunks) {
82
+ let promoted = 0;
83
+ for (const chunk of chunks) {
84
+ if (chunk.tier !== 'short-term')
85
+ continue;
86
+ const ageDays = daysSince(chunk.createdAt);
87
+ const lastRecalledDays = chunk.lastRecalledAt ? daysSince(chunk.lastRecalledAt) : Infinity;
88
+ const helpfulCount = chunk.recallOutcomes.filter(o => o.outcome === 'helpful').length;
89
+ const shouldPromote = chunk.importance >= 0.8 ||
90
+ (chunk.recallCount >= 3 && ageDays >= 7) ||
91
+ (helpfulCount >= 2 && lastRecalledDays < 7) ||
92
+ (chunk.cognitiveLayer === 'procedural' && chunk.importance >= 0.5) ||
93
+ (chunk.recallCount >= 1 && ageDays >= 30 && lastRecalledDays < 7);
94
+ if (shouldPromote) {
95
+ await storage.updateChunk(chunk.id, { tier: 'long-term' });
96
+ promoted++;
97
+ }
98
+ }
99
+ return promoted;
100
+ }
101
+ // ── Long-term → Archive ──────────────────────────────────────────────
102
+ async function demoteToArchive(storage, chunks) {
103
+ let demoted = 0;
104
+ const now = Date.now();
105
+ for (const chunk of chunks) {
106
+ if (chunk.tier !== 'long-term')
107
+ continue;
108
+ // User-asserted memories are user-territory: never auto-archive.
109
+ // The user can explicitly delete or update_metadata to retire them.
110
+ if (chunk.origin === 'user')
111
+ continue;
112
+ const ageMs = now - new Date(chunk.createdAt).getTime();
113
+ const lastRecallMs = chunk.lastRecalledAt
114
+ ? now - new Date(chunk.lastRecalledAt).getTime()
115
+ : Infinity;
116
+ const tooOld = ageMs >= 90 * 86_400_000;
117
+ const inactive = lastRecallMs >= 30 * 86_400_000;
118
+ if (tooOld || (inactive && chunk.importance < 0.3)) {
119
+ await storage.updateChunk(chunk.id, { tier: 'archive' });
120
+ demoted++;
121
+ }
122
+ }
123
+ return demoted;
124
+ }
125
+ // ── Archive → Long-term (reactivation) ───────────────────────────────
126
+ async function reactivateArchived(storage, chunks) {
127
+ let reactivated = 0;
128
+ const now = Date.now();
129
+ for (const chunk of chunks) {
130
+ if (chunk.tier !== 'archive' || !chunk.lastRecalledAt)
131
+ continue;
132
+ if (now - new Date(chunk.lastRecalledAt).getTime() < 7 * 86_400_000) {
133
+ await storage.updateChunk(chunk.id, { tier: 'long-term' });
134
+ reactivated++;
135
+ }
136
+ }
137
+ return reactivated;
138
+ }
139
+ // ── Tag-based Linking ────────────────────────────────────────────────
140
+ async function linkRelated(storage, chunks) {
141
+ let linked = 0;
142
+ const tagIndex = new Map();
143
+ const chunkMap = new Map();
144
+ for (const chunk of chunks) {
145
+ if (chunk.tier === 'archive')
146
+ continue;
147
+ chunkMap.set(chunk.id, chunk);
148
+ for (const tag of chunk.tags) {
149
+ const ids = tagIndex.get(tag) ?? [];
150
+ ids.push(chunk.id);
151
+ tagIndex.set(tag, ids);
152
+ }
153
+ }
154
+ const seen = new Set();
155
+ for (const [, ids] of tagIndex) {
156
+ if (ids.length < 2 || ids.length > 50)
157
+ continue;
158
+ for (let i = 0; i < ids.length; i++) {
159
+ for (let j = i + 1; j < ids.length; j++) {
160
+ const a = chunkMap.get(ids[i]);
161
+ const b = chunkMap.get(ids[j]);
162
+ const key = a.id < b.id ? `${a.id}|${b.id}` : `${b.id}|${a.id}`;
163
+ if (seen.has(key))
164
+ continue;
165
+ seen.add(key);
166
+ if (getEdgeTargetIds(a.relatedMemories).includes(b.id))
167
+ continue;
168
+ const overlap = a.tags.filter(t => b.tags.includes(t));
169
+ if (overlap.length >= 2 || (overlap.length >= 1 && a.cognitiveLayer === b.cognitiveLayer)) {
170
+ const rel = a.source === b.source ? 'temporal' : 'semantic';
171
+ const weight = Math.min(1.0, 0.3 + overlap.length * 0.15);
172
+ const aEdges = addEdge(a.relatedMemories, b.id, rel, weight);
173
+ const bEdges = addEdge(b.relatedMemories, a.id, rel, weight);
174
+ await storage.updateChunk(a.id, { relatedMemories: aEdges });
175
+ await storage.updateChunk(b.id, { relatedMemories: bEdges });
176
+ a.relatedMemories = aEdges;
177
+ b.relatedMemories = bEdges;
178
+ linked++;
179
+ }
180
+ }
181
+ }
182
+ }
183
+ return linked;
184
+ }
185
+ // ── Importance Decay ─────────────────────────────────────────────────
186
+ async function decayImportance(storage, chunks) {
187
+ let decayed = 0;
188
+ const now = Date.now();
189
+ const rates = { procedural: 0.98, semantic: 0.97, episodic: 0.95 };
190
+ const floors = { procedural: 0.15, semantic: 0.10, episodic: 0.05 };
191
+ for (const chunk of chunks) {
192
+ if (chunk.tier === 'archive')
193
+ continue;
194
+ const lastTouch = chunk.lastRecalledAt
195
+ ? new Date(chunk.lastRecalledAt).getTime()
196
+ : new Date(chunk.createdAt).getTime();
197
+ const daysSinceTouch = (now - lastTouch) / 86_400_000;
198
+ if (daysSinceTouch < 7)
199
+ continue;
200
+ const rate = rates[chunk.cognitiveLayer] ?? 0.97;
201
+ const floor = floors[chunk.cognitiveLayer] ?? 0.10;
202
+ const weeks = daysSinceTouch / 7;
203
+ const newImportance = Math.max(floor, chunk.importance * Math.pow(rate, weeks));
204
+ if (Math.abs(newImportance - chunk.importance) > 0.01) {
205
+ await storage.updateChunk(chunk.id, { importance: newImportance });
206
+ decayed++;
207
+ }
208
+ }
209
+ return decayed;
210
+ }
211
+ // ── Decay Irrelevant ─────────────────────────────────────────────────
212
+ async function decayIrrelevant(storage, chunks) {
213
+ let decayed = 0;
214
+ for (const chunk of chunks) {
215
+ const outcomes = chunk.recallOutcomes;
216
+ if (outcomes.length < 3)
217
+ continue;
218
+ const recent = outcomes.slice(-5);
219
+ const irrelevant = recent.filter(o => o.outcome === 'irrelevant').length;
220
+ if (irrelevant >= 3) {
221
+ const newImportance = Math.max(0.05, chunk.importance - 0.2);
222
+ await storage.updateChunk(chunk.id, { importance: newImportance });
223
+ // Never auto-archive user-asserted memories on recall-outcome decay —
224
+ // the user explicitly saved them; "irrelevant for this query" doesn't
225
+ // mean retire.
226
+ if (newImportance <= 0.1 && chunk.origin !== 'user') {
227
+ await storage.updateChunk(chunk.id, { tier: 'archive' });
228
+ }
229
+ decayed++;
230
+ }
231
+ }
232
+ return decayed;
233
+ }
234
// ── Near-duplicate Merging ───────────────────────────────────────────
/**
 * Merge semantic-layer chunks whose embeddings are near-identical
 * (cosine similarity > 0.9). Only the 200 most recently touched embedded
 * semantic chunks are considered. The keeper absorbs the loser's
 * recallCount and gains a small importance boost; the loser is deleted.
 *
 * @param {object} storage - backing store; uses updateChunk and deleteChunk
 * @param {Array<object>} chunks - candidate memory chunks
 * @returns {Promise<number>} number of merges performed
 */
async function mergeNearDuplicates(storage, chunks) {
    let merged = 0;
    const consumed = new Set();
    const candidates = chunks
        .filter(c => c.cognitiveLayer === 'semantic' && c.embedding && c.embedding.length > 0)
        .sort((a, b) => new Date(b.lastRecalledAt ?? b.createdAt).getTime() -
        new Date(a.lastRecalledAt ?? a.createdAt).getTime())
        .slice(0, 200);
    for (const chunk of candidates) {
        if (consumed.has(chunk.id))
            continue;
        for (const other of candidates) {
            if (other.id === chunk.id || consumed.has(other.id))
                continue;
            if (!other.embedding || !chunk.embedding)
                continue;
            if (cosineSimilarity(chunk.embedding, other.embedding) > 0.9) {
                // Pick keeper by importance, but a user-origin memory always wins
                // over a derived one — user-asserted text is canonical, never
                // displaced by an auto-extracted near-duplicate.
                let keeper;
                let loser;
                if (chunk.origin === 'user' && other.origin !== 'user') {
                    keeper = chunk;
                    loser = other;
                }
                else if (other.origin === 'user' && chunk.origin !== 'user') {
                    keeper = other;
                    loser = chunk;
                }
                else if (chunk.origin === 'user' && other.origin === 'user') {
                    // Both user-asserted near-duplicates: leave both. Merging would
                    // delete content the user explicitly wrote.
                    continue;
                }
                else {
                    keeper = chunk.importance >= other.importance ? chunk : other;
                    loser = keeper === chunk ? other : chunk;
                }
                const combinedRecallCount = keeper.recallCount + loser.recallCount;
                const boostedImportance = Math.min(1.0, keeper.importance + 0.03);
                await storage.updateChunk(keeper.id, {
                    recallCount: combinedRecallCount,
                    importance: boostedImportance,
                });
                // BUGFIX: refresh the in-memory keeper so a later merge in this
                // same pass accumulates on top of these values instead of the
                // stale pre-merge ones (previously recall counts were lost).
                keeper.recallCount = combinedRecallCount;
                keeper.importance = boostedImportance;
                await storage.deleteChunk(loser.id);
                consumed.add(loser.id);
                merged++;
                // BUGFIX: if the outer chunk itself was merged away, stop
                // comparing it — previously the loop kept using the deleted
                // chunk and could update/delete it again.
                if (loser === chunk)
                    break;
            }
        }
    }
    return merged;
}
286
// ── FSRS Decay (Improvement 3) + Adaptive Forgetting ──────────────
// Free Spaced Repetition Scheduler: R = (1 + t/(9*S))^(-1)
// More principled than exponential decay. Stability grows with successful recall.
//
// Adaptive forgetting (FadeMem-inspired): semantic proximity to recently
// active memories modulates decay rate. Memories near active clusters
// decay slower (proximity shield), isolated memories decay faster.
/** Retrievability after t idle days at stability S: R = (1 + t/(9S))^(-1). */
function fsrsRetrievability(t, S) {
    const normalized = t / (9 * S);
    return Math.pow(1 + normalized, -1);
}
296
/**
 * Collect recently active memories (recalled or ingested in the last N days)
 * to serve as "active cluster" exemplars for adaptive forgetting.
 * Only non-archived chunks with embeddings qualify; the 10 most recently
 * touched are returned, newest first.
 */
function getActiveExemplars(chunks, windowDays = 7) {
    const nowMs = Date.now();
    const cutoffMs = nowMs - windowDays * 86_400_000;
    const active = [];
    for (const c of chunks) {
        if (c.tier === 'archive')
            continue;
        if (!c.embedding || c.embedding.length === 0)
            continue;
        const createdMs = new Date(c.createdAt).getTime();
        // A recall after creation counts; a createdAt newer than the last
        // recall also counts (take whichever touch is most recent).
        const lastActive = c.lastRecalledAt
            ? Math.max(new Date(c.lastRecalledAt).getTime(), createdMs)
            : createdMs;
        if (lastActive >= cutoffMs)
            active.push(c);
    }
    active.sort((a, b) => {
        const aTouch = new Date(a.lastRecalledAt ?? a.createdAt).getTime();
        const bTouch = new Date(b.lastRecalledAt ?? b.createdAt).getTime();
        return bTouch - aTouch;
    });
    return active.slice(0, 10); // Keep top 10 most recent as exemplars
}
320
/**
 * Compute max cosine similarity between a chunk and active exemplars.
 * Falls back to tag-overlap (Jaccard) when the chunk has no embedding;
 * returns 0 when neither signal is available.
 */
function computeProximityFactor(chunk, exemplars) {
    if (exemplars.length === 0)
        return 0;
    const emb = chunk.embedding;
    // Embedding-based proximity (preferred signal)
    if (emb && emb.length > 0) {
        let best = 0;
        for (const ex of exemplars) {
            if (!ex.embedding || ex.embedding.length !== emb.length)
                continue;
            const sim = cosineSimilarity(emb, ex.embedding);
            if (sim > best)
                best = sim;
        }
        return best;
    }
    // Tag-based fallback: Jaccard similarity between tag sets
    if (chunk.tags.length === 0)
        return 0;
    const ownTags = new Set(chunk.tags);
    let bestJaccard = 0;
    for (const ex of exemplars) {
        if (ex.tags.length === 0)
            continue;
        const otherTags = new Set(ex.tags);
        let shared = 0;
        for (const tag of ownTags) {
            if (otherTags.has(tag))
                shared++;
        }
        const unionSize = new Set([...ownTags, ...otherTags]).size;
        const jaccard = unionSize > 0 ? shared / unionSize : 0;
        if (jaccard > bestJaccard)
            bestJaccard = jaccard;
    }
    return bestJaccard;
}
359
/**
 * FSRS-based importance decay with adaptive forgetting.
 * Non-archived chunks idle for 3+ days have their importance multiplied by
 * the FSRS retrievability at their (proximity-modulated) stability, floored
 * per cognitive layer. Proximity to recently active memories shields a
 * chunk (slower decay); isolation accelerates decay.
 *
 * @returns {Promise<number>} count of chunks whose importance changed (> 0.01)
 */
async function decayFSRS(storage, chunks) {
    const nowMs = Date.now();
    const importanceFloors = { procedural: 0.15, semantic: 0.10, episodic: 0.05 };
    // Adaptive forgetting: compute the active-cluster exemplars once per pass.
    const activeExemplars = getActiveExemplars(chunks);
    let touched = 0;
    for (const chunk of chunks) {
        if (chunk.tier === 'archive')
            continue;
        const lastTouchMs = chunk.lastRecalledAt
            ? new Date(chunk.lastRecalledAt).getTime()
            : new Date(chunk.createdAt).getTime();
        const idleDays = (nowMs - lastTouchMs) / 86_400_000;
        if (idleDays < 3)
            continue;
        const baseStability = chunk.stability ?? 1.0;
        const proximity = computeProximityFactor(chunk, activeExemplars);
        // Proximity shield: near-active memories get up to 3x stability,
        // isolated ones decay on 70% stability, the middle band is unchanged.
        let effectiveStability = baseStability;
        if (proximity > 0.5) {
            effectiveStability = baseStability * (1 + proximity * 2);
        }
        else if (proximity < 0.2) {
            effectiveStability = baseStability * 0.7;
        }
        const floor = importanceFloors[chunk.cognitiveLayer] ?? 0.10;
        const retrievability = fsrsRetrievability(idleDays, effectiveStability);
        const nextImportance = Math.max(floor, chunk.importance * retrievability);
        // Skip no-op writes: only persist changes larger than the 0.01 epsilon.
        if (Math.abs(nextImportance - chunk.importance) > 0.01) {
            await storage.updateChunk(chunk.id, { importance: nextImportance });
            touched++;
        }
    }
    return touched;
}
400
/**
 * Update FSRS stability/difficulty after a recall outcome.
 * Call this from outcome.ts when a memory is recalled.
 * 'helpful' grows stability (capped at 365) and eases difficulty;
 * 'corrected' halves stability and raises difficulty; 'irrelevant'
 * shrinks stability mildly. Stability never drops below 0.5.
 */
export function computeFSRSUpdate(chunk, outcome) {
    const S = chunk.stability ?? 1.0;
    const D = chunk.difficulty ?? 0.3;
    if (outcome === 'helpful') {
        // Stability growth: easier (low-D) memories grow faster
        const growth = S * (1 + Math.exp(0.1) * (11 - D * 10) * Math.pow(S, -0.2));
        return { stability: Math.min(365, growth), difficulty: Math.max(0, D - 0.02) };
    }
    if (outcome === 'corrected') {
        // Halve stability, increase difficulty
        return { stability: Math.max(0.5, S * 0.5), difficulty: Math.min(1.0, D + 0.1) };
    }
    if (outcome === 'irrelevant') {
        // Reduce stability, slight difficulty increase
        return { stability: Math.max(0.5, S * 0.8), difficulty: Math.min(1.0, D + 0.05) };
    }
}
423
// ── Biased Replay (Improvement 5) ──────────────────────────────────
// Consolidation order is biased by importance * recency * (1 + surprise).
// High-surprise memories (corrections, contradictions) get extra attention.
/** Return a new array of chunks ordered by descending replay priority. */
function sortByReplayPriority(chunks) {
    const nowMs = Date.now();
    return [...chunks].sort((a, b) => replayPriority(b, nowMs) - replayPriority(a, nowMs));
}
/** Priority = importance * exp(-idleDays/30) * (1 + surprise). */
function replayPriority(chunk, now) {
    const lastTouchMs = chunk.lastRecalledAt
        ? new Date(chunk.lastRecalledAt).getTime()
        : new Date(chunk.createdAt).getTime();
    const idleDays = (now - lastTouchMs) / 86_400_000;
    const recency = Math.exp(-idleDays / 30); // Half-life ~30 days
    // Surprise: correction-type memories and recently 'corrected' outcomes
    // both raise priority.
    let surprise = chunk.type === 'correction' ? 0.5 : 0;
    const recentOutcomes = chunk.recallOutcomes.slice(-5);
    surprise += recentOutcomes.filter(o => o.outcome === 'corrected').length * 0.2;
    return chunk.importance * recency * (1 + surprise);
}
450
+ // ── Helpers ──────────────────────────────────────────────────────────
451
// ── Self-Organizing Memories ─────────────────────────────────────────
// Auto-generate domain/topic for memories that lack them, and create
// cross-links between semantically related memories that share no tags.
// Runs during consolidation — no new MCP tools needed.
/**
 * @param {object} storage - backing store; only updateChunk(id, patch) is used
 * @param {Array<object>} chunks - candidate memory chunks
 * @returns {Promise<number>} count of chunks described or cross-linked
 */
async function selfOrganize(storage, chunks) {
    let organized = 0;
    // Phase 1: Auto-describe — infer domain/topic from content heuristics
    // (capped at 50 chunks per pass to bound work).
    const undescribed = chunks.filter(c => c.tier !== 'archive' && (!c.domain || !c.topic));
    for (const chunk of undescribed.slice(0, 50)) {
        const updates = {};
        const lower = chunk.content.toLowerCase();
        // Domain inference: a domain wins on >= 2 keyword hits, first match in
        // declaration order takes priority.
        if (!chunk.domain) {
            const domainSignals = {
                code: ['function', 'import', 'export', 'typescript', 'react', 'component', 'api', 'endpoint', 'bug', 'refactor', 'deploy'],
                design: ['figma', 'layout', 'color', 'font', 'responsive', 'ui', 'ux', 'component', 'tailwind'],
                infrastructure: ['vercel', 'docker', 'ci/cd', 'deploy', 'database', 'postgres', 'env', 'server'],
                business: ['pricing', 'customer', 'marketing', 'revenue', 'competitor', 'launch', 'user'],
                personal: ['prefer', 'like', 'dislike', 'always', 'never', 'habit', 'schedule'],
            };
            for (const [domain, keywords] of Object.entries(domainSignals)) {
                const hits = keywords.filter(k => lower.includes(k)).length;
                if (hits >= 2) {
                    updates.domain = domain;
                    break;
                }
            }
        }
        // Topic inference: longest non-generic tag wins.
        if (!chunk.topic && chunk.tags.length > 0) {
            const topic = chunk.tags
                .filter(t => t.length > 2 && !['general', 'misc', 'other'].includes(t))
                .sort((a, b) => b.length - a.length)[0];
            // BUGFIX: only record a topic when one actually qualifies. Previously
            // an all-generic tag list set `updates.topic = undefined`, which still
            // counted as an update and wrote { topic: undefined } to storage.
            if (topic !== undefined) {
                updates.topic = topic;
            }
        }
        if (Object.keys(updates).length > 0) {
            await storage.updateChunk(chunk.id, updates);
            organized++;
        }
    }
    // Phase 2: Cross-link — find semantically similar memories (0.7 < sim < 0.9,
    // i.e. related but not near-duplicates) with no existing edge between them.
    const candidates = chunks
        .filter(c => c.tier !== 'archive' && c.embedding && c.embedding.length > 0)
        .slice(0, 100);
    for (let i = 0; i < candidates.length; i++) {
        const a = candidates[i];
        const existingTargets = new Set(getEdgeTargetIds(a.relatedMemories));
        for (let j = i + 1; j < candidates.length; j++) {
            const b = candidates[j];
            if (existingTargets.has(b.id))
                continue;
            const sim = cosineSimilarity(a.embedding, b.embedding);
            if (sim > 0.7 && sim < 0.9) { // Related but not duplicate
                const weight = Math.min(1.0, sim);
                const aEdges = addEdge(a.relatedMemories, b.id, 'semantic', weight);
                const bEdges = addEdge(b.relatedMemories, a.id, 'semantic', weight);
                await storage.updateChunk(a.id, { relatedMemories: aEdges });
                await storage.updateChunk(b.id, { relatedMemories: bEdges });
                // Keep in-memory edge lists current for later iterations.
                a.relatedMemories = aEdges;
                b.relatedMemories = bEdges;
                organized++;
            }
        }
    }
    return organized;
}
517
/** Fractional days elapsed between the given date string and now. */
function daysSince(dateStr) {
    const elapsedMs = Date.now() - new Date(dateStr).getTime();
    return elapsedMs / 86_400_000;
}
520
520
  //# sourceMappingURL=consolidator.js.map