@cartisien/engram 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/README.md +104 -203
  2. package/dist/adapters/base.d.ts +2 -0
  3. package/dist/adapters/base.d.ts.map +1 -0
  4. package/dist/adapters/base.js +2 -0
  5. package/dist/adapters/base.js.map +1 -0
  6. package/dist/adapters/memory.d.ts +17 -0
  7. package/dist/adapters/memory.d.ts.map +1 -0
  8. package/dist/adapters/memory.js +58 -0
  9. package/dist/adapters/memory.js.map +1 -0
  10. package/dist/adapters/postgres.d.ts +33 -0
  11. package/dist/adapters/postgres.d.ts.map +1 -0
  12. package/dist/adapters/postgres.js +47 -0
  13. package/dist/adapters/postgres.js.map +1 -0
  14. package/dist/adapters/sqlite.d.ts +19 -0
  15. package/dist/adapters/sqlite.d.ts.map +1 -0
  16. package/dist/adapters/sqlite.js +33 -0
  17. package/dist/adapters/sqlite.js.map +1 -0
  18. package/dist/engram.d.ts +57 -0
  19. package/dist/engram.d.ts.map +1 -0
  20. package/dist/engram.js +148 -0
  21. package/dist/engram.js.map +1 -0
  22. package/dist/index.d.ts +99 -122
  23. package/dist/index.d.ts.map +1 -1
  24. package/dist/index.js +458 -468
  25. package/dist/index.js.map +1 -1
  26. package/dist/types.d.ts +48 -0
  27. package/dist/types.d.ts.map +1 -0
  28. package/dist/types.js +8 -0
  29. package/dist/types.js.map +1 -0
  30. package/dist/utils/embeddings.d.ts +20 -0
  31. package/dist/utils/embeddings.d.ts.map +1 -0
  32. package/dist/utils/embeddings.js +28 -0
  33. package/dist/utils/embeddings.js.map +1 -0
  34. package/dist/utils/similarity.d.ts +10 -0
  35. package/dist/utils/similarity.d.ts.map +1 -0
  36. package/dist/utils/similarity.js +31 -0
  37. package/dist/utils/similarity.js.map +1 -0
  38. package/package.json +4 -3
  39. package/LICENSE +0 -21
  40. package/dist/example/temporal-demo.js +0 -91
package/dist/index.js CHANGED
@@ -1,279 +1,41 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.Engram = exports.TemporalQuery = void 0;
4
- const crypto_1 = require("crypto");
1
+ import { createHash } from 'crypto';
5
2
  /**
6
- * Temporal query parser for natural language time expressions
7
- */
8
- class TemporalQuery {
9
- referenceDate;
10
- timezoneOffset;
11
- constructor(referenceDate = new Date(), timezoneOffset = -referenceDate.getTimezoneOffset()) {
12
- this.referenceDate = new Date(referenceDate);
13
- this.timezoneOffset = timezoneOffset;
14
- }
15
- /**
16
- * Parse a natural language time expression into a concrete time range
17
- *
18
- * Supports:
19
- * - Relative: 'today', 'yesterday', 'tomorrow'
20
- * - Days ago: '3 days ago', 'a week ago', '2 weeks ago'
21
- * - Last: 'last monday', 'last week', 'last month'
22
- * - This: 'this week', 'this month', 'this year'
23
- * - Recent: 'recent', 'lately', 'recently' (last 7 days)
24
- * - Between: 'january 15 to january 20', '3/1 to 3/15'
25
- */
26
- parse(expression) {
27
- const normalized = expression.toLowerCase().trim();
28
- const now = new Date(this.referenceDate);
29
- // Handle 'now', 'recent', 'lately', 'recently' → last 7 days
30
- if (/^(now|recent|lately|recently)$/.test(normalized)) {
31
- const end = new Date(now);
32
- const start = new Date(now);
33
- start.setDate(start.getDate() - 7);
34
- return { start, end, description: 'last 7 days' };
35
- }
36
- // Handle 'today'
37
- if (normalized === 'today') {
38
- const start = this.startOfDay(now);
39
- const end = new Date(now);
40
- return { start, end, description: 'today' };
41
- }
42
- // Handle 'yesterday'
43
- if (normalized === 'yesterday') {
44
- const start = this.startOfDay(now);
45
- start.setDate(start.getDate() - 1);
46
- const end = this.endOfDay(start);
47
- return { start, end, description: 'yesterday' };
48
- }
49
- // Handle 'tomorrow' (future, but useful for completeness)
50
- if (normalized === 'tomorrow') {
51
- const start = this.startOfDay(now);
52
- start.setDate(start.getDate() + 1);
53
- const end = this.endOfDay(start);
54
- return { start, end, description: 'tomorrow' };
55
- }
56
- // Handle 'N days ago' / 'a week ago' / 'N weeks ago'
57
- const daysAgoMatch = normalized.match(/^(?:(\d+)|a|one)\s+(day|week|month)s?\s+ago$/);
58
- if (daysAgoMatch) {
59
- const num = daysAgoMatch[1] ? parseInt(daysAgoMatch[1]) : 1;
60
- const unit = daysAgoMatch[2];
61
- const start = this.startOfDay(now);
62
- const end = this.endOfDay(now);
63
- if (unit === 'day') {
64
- start.setDate(start.getDate() - num);
65
- end.setDate(end.getDate() - num);
66
- }
67
- else if (unit === 'week') {
68
- start.setDate(start.getDate() - (num * 7));
69
- end.setDate(end.getDate() - ((num - 1) * 7) - 1);
70
- }
71
- else if (unit === 'month') {
72
- start.setMonth(start.getMonth() - num);
73
- start.setDate(1);
74
- end.setMonth(end.getMonth() - num + 1);
75
- end.setDate(0);
76
- }
77
- return { start, end, description: `${num} ${unit}${num > 1 ? 's' : ''} ago` };
78
- }
79
- // Handle 'last N days/weeks/months' (range ending now)
80
- const lastNMatch = normalized.match(/^last\s+(?:(\d+)|a|one)\s+(day|week|month)s?$/);
81
- if (lastNMatch) {
82
- const num = lastNMatch[1] ? parseInt(lastNMatch[1]) : 1;
83
- const unit = lastNMatch[2];
84
- const start = new Date(now);
85
- const end = new Date(now);
86
- if (unit === 'day') {
87
- start.setDate(start.getDate() - num);
88
- }
89
- else if (unit === 'week') {
90
- start.setDate(start.getDate() - (num * 7));
91
- }
92
- else if (unit === 'month') {
93
- start.setMonth(start.getMonth() - num);
94
- }
95
- return { start, end, description: `last ${num} ${unit}${num > 1 ? 's' : ''}` };
96
- }
97
- // Handle 'this week/month/year'
98
- const thisMatch = normalized.match(/^this\s+(week|month|year)$/);
99
- if (thisMatch) {
100
- const unit = thisMatch[1];
101
- const start = new Date(now);
102
- const end = new Date(now);
103
- if (unit === 'week') {
104
- const dayOfWeek = start.getDay();
105
- start.setDate(start.getDate() - dayOfWeek);
106
- start.setHours(0, 0, 0, 0);
107
- end.setDate(start.getDate() + 6);
108
- end.setHours(23, 59, 59, 999);
109
- }
110
- else if (unit === 'month') {
111
- start.setDate(1);
112
- start.setHours(0, 0, 0, 0);
113
- end.setMonth(end.getMonth() + 1);
114
- end.setDate(0);
115
- end.setHours(23, 59, 59, 999);
116
- }
117
- else if (unit === 'year') {
118
- start.setMonth(0, 1);
119
- start.setHours(0, 0, 0, 0);
120
- end.setMonth(11, 31);
121
- end.setHours(23, 59, 59, 999);
122
- }
123
- return { start, end, description: `this ${unit}` };
124
- }
125
- // Handle 'last week/month/year' (previous full period)
126
- const lastPeriodMatch = normalized.match(/^last\s+(week|month|year)$/);
127
- if (lastPeriodMatch) {
128
- const unit = lastPeriodMatch[1];
129
- const start = new Date(now);
130
- const end = new Date(now);
131
- if (unit === 'week') {
132
- const dayOfWeek = start.getDay();
133
- start.setDate(start.getDate() - dayOfWeek - 7);
134
- start.setHours(0, 0, 0, 0);
135
- end.setDate(start.getDate() + 6);
136
- end.setHours(23, 59, 59, 999);
137
- }
138
- else if (unit === 'month') {
139
- start.setMonth(start.getMonth() - 1);
140
- start.setDate(1);
141
- start.setHours(0, 0, 0, 0);
142
- end.setDate(0);
143
- end.setHours(23, 59, 59, 999);
144
- }
145
- else if (unit === 'year') {
146
- start.setFullYear(start.getFullYear() - 1);
147
- start.setMonth(0, 1);
148
- start.setHours(0, 0, 0, 0);
149
- end.setMonth(0, 0);
150
- end.setHours(23, 59, 59, 999);
151
- }
152
- return { start, end, description: `last ${unit}` };
153
- }
154
- // Handle day names: 'monday', 'last monday', 'tuesday', etc.
155
- const dayNames = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'saturday'];
156
- const dayMatch = normalized.match(/^(?:last\s+)?(sunday|monday|tuesday|wednesday|thursday|friday|saturday)$/);
157
- if (dayMatch) {
158
- const targetDay = dayNames.indexOf(dayMatch[1]);
159
- const isLast = normalized.startsWith('last ');
160
- const start = this.startOfDay(now);
161
- let daysDiff = start.getDay() - targetDay;
162
- if (daysDiff <= 0) {
163
- daysDiff += 7;
164
- }
165
- if (isLast && daysDiff === 0) {
166
- daysDiff = 7;
167
- }
168
- start.setDate(start.getDate() - daysDiff);
169
- const end = this.endOfDay(start);
170
- return { start, end, description: isLast ? `last ${dayMatch[1]}` : dayMatch[1] };
171
- }
172
- // Handle date ranges: 'jan 15 to jan 20', '3/1 to 3/15', '2024-01-15 to 2024-01-20'
173
- const rangeMatch = normalized.match(/^(.+?)\s+(?:to|through|until|-)\s+(.+)$/);
174
- if (rangeMatch) {
175
- const startDate = this.parseDate(rangeMatch[1]);
176
- const endDate = this.parseDate(rangeMatch[2]);
177
- if (startDate && endDate) {
178
- return {
179
- start: this.startOfDay(startDate),
180
- end: this.endOfDay(endDate),
181
- description: `${rangeMatch[1]} to ${rangeMatch[2]}`
182
- };
183
- }
184
- }
185
- // Try to parse as single date
186
- const singleDate = this.parseDate(normalized);
187
- if (singleDate) {
188
- return {
189
- start: this.startOfDay(singleDate),
190
- end: this.endOfDay(singleDate),
191
- description: normalized
192
- };
193
- }
194
- return null;
195
- }
196
- parseDate(expr) {
197
- const normalized = expr.trim().toLowerCase();
198
- const now = new Date(this.referenceDate);
199
- // Try various date formats
200
- const formats = [
201
- // MM/DD or MM/DD/YY or MM/DD/YYYY
202
- { regex: /^(\d{1,2})\/(\d{1,2})(?:\/(\d{2,4}))?$/, fn: (m) => {
203
- const month = parseInt(m[1]) - 1;
204
- const day = parseInt(m[2]);
205
- let year = now.getFullYear();
206
- if (m[3]) {
207
- const y = parseInt(m[3]);
208
- year = y < 100 ? (y < 50 ? 2000 + y : 1900 + y) : y;
209
- }
210
- return new Date(year, month, day);
211
- } },
212
- // Month name + day (e.g., "january 15" or "jan 15")
213
- { regex: /^(jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)[a-z]*\s+(\d{1,2})(?:,?\s+(\d{4}))?$/i, fn: (m) => {
214
- const months = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct', 'nov', 'dec'];
215
- const month = months.indexOf(m[1].toLowerCase().slice(0, 3));
216
- const day = parseInt(m[2]);
217
- let year = now.getFullYear();
218
- if (m[3])
219
- year = parseInt(m[3]);
220
- return new Date(year, month, day);
221
- } },
222
- // ISO date YYYY-MM-DD
223
- { regex: /^(\d{4})-(\d{2})-(\d{2})$/, fn: (m) => {
224
- return new Date(parseInt(m[1]), parseInt(m[2]) - 1, parseInt(m[3]));
225
- } }
226
- ];
227
- for (const format of formats) {
228
- const match = normalized.match(format.regex);
229
- if (match) {
230
- const date = format.fn(match);
231
- if (!isNaN(date.getTime()))
232
- return date;
233
- }
234
- }
235
- return null;
236
- }
237
- startOfDay(date) {
238
- const d = new Date(date);
239
- d.setHours(0, 0, 0, 0);
240
- return d;
241
- }
242
- endOfDay(date) {
243
- const d = new Date(date);
244
- d.setHours(23, 59, 59, 999);
245
- return d;
246
- }
247
- }
248
- exports.TemporalQuery = TemporalQuery;
249
- /**
250
- * Engram - Persistent memory for AI assistants
3
+ * Engram - Persistent semantic memory for AI agents
251
4
  *
252
- * A lightweight, SQLite-backed memory system that gives your AI assistants
253
- * the ability to remember conversations across sessions.
5
+ * v0.4 adds memory consolidation: working memories are periodically
6
+ * summarized into long-term memories by a local LLM, keeping context
7
+ * dense and relevant as conversations grow.
254
8
  *
255
9
  * @example
256
10
  * ```typescript
257
11
  * import { Engram } from '@cartisien/engram';
258
12
  *
259
- * const memory = new Engram({ dbPath: './memory.db' });
260
- *
261
- * // Store a memory
262
- * await memory.remember('user_123', 'Jeff loves Triumph motorcycles', 'user');
13
+ * const memory = new Engram({
14
+ * dbPath: './memory.db',
15
+ * autoConsolidate: true,
16
+ * consolidateThreshold: 100,
17
+ * });
263
18
  *
264
- * // Retrieve with temporal query
265
- * const yesterday = await memory.recallByTime('user_123', 'yesterday');
266
- * const lastWeek = await memory.recallByTime('user_123', 'last week');
19
+ * // Manual consolidation
20
+ * const result = await memory.consolidate('session_1');
21
+ * // { summarized: 50, created: 4, archived: 50 }
267
22
  * ```
268
23
  */
269
- class Engram {
270
- db;
271
- maxContextLength;
272
- dbPath;
273
- initialized = false;
24
+ export class Engram {
274
25
  constructor(config = {}) {
26
+ this.initialized = false;
275
27
  this.dbPath = config.dbPath || ':memory:';
276
28
  this.maxContextLength = config.maxContextLength || 4000;
29
+ this.embeddingUrl = config.embeddingUrl || 'http://192.168.68.73:11434';
30
+ this.embeddingModel = config.embeddingModel || 'nomic-embed-text';
31
+ this.semanticSearch = config.semanticSearch !== false;
32
+ this.graphMemory = config.graphMemory === true;
33
+ this.graphModel = config.graphModel || 'qwen2.5:32b';
34
+ this.autoConsolidate = config.autoConsolidate === true;
35
+ this.consolidateThreshold = config.consolidateThreshold ?? 100;
36
+ this.consolidateKeep = config.consolidateKeep ?? 20;
37
+ this.consolidateBatch = config.consolidateBatch ?? 50;
38
+ this.consolidateModel = config.consolidateModel || config.graphModel || 'qwen2.5:32b';
277
39
  }
278
40
  async init() {
279
41
  if (this.initialized)
@@ -284,7 +46,7 @@ class Engram {
284
46
  filename: this.dbPath,
285
47
  driver: sqlite3.Database
286
48
  });
287
- // Create memories table
49
+ // Memories table
288
50
  await this.db.exec(`
289
51
  CREATE TABLE IF NOT EXISTS memories (
290
52
  id TEXT PRIMARY KEY,
@@ -293,83 +55,323 @@ class Engram {
293
55
  role TEXT CHECK(role IN ('user', 'assistant', 'system')),
294
56
  timestamp INTEGER NOT NULL,
295
57
  metadata TEXT,
296
- content_hash TEXT NOT NULL
58
+ content_hash TEXT NOT NULL,
59
+ embedding TEXT,
60
+ tier TEXT NOT NULL DEFAULT 'working',
61
+ consolidated_from TEXT
62
+ );
63
+ `);
64
+ // Migrations for existing databases
65
+ const migrations = [
66
+ `ALTER TABLE memories ADD COLUMN embedding TEXT`,
67
+ `ALTER TABLE memories ADD COLUMN tier TEXT NOT NULL DEFAULT 'working'`,
68
+ `ALTER TABLE memories ADD COLUMN consolidated_from TEXT`,
69
+ ];
70
+ for (const m of migrations) {
71
+ try {
72
+ await this.db.exec(m);
73
+ }
74
+ catch { /* column exists */ }
75
+ }
76
+ // v0.3: Graph tables
77
+ await this.db.exec(`
78
+ CREATE TABLE IF NOT EXISTS graph_nodes (
79
+ id TEXT PRIMARY KEY,
80
+ session_id TEXT NOT NULL,
81
+ entity TEXT NOT NULL,
82
+ type TEXT,
83
+ created_at INTEGER NOT NULL
84
+ );
85
+ CREATE UNIQUE INDEX IF NOT EXISTS idx_node_entity
86
+ ON graph_nodes(session_id, entity);
87
+ `);
88
+ await this.db.exec(`
89
+ CREATE TABLE IF NOT EXISTS graph_edges (
90
+ id TEXT PRIMARY KEY,
91
+ session_id TEXT NOT NULL,
92
+ from_entity TEXT NOT NULL,
93
+ relation TEXT NOT NULL,
94
+ to_entity TEXT NOT NULL,
95
+ confidence REAL DEFAULT 1.0,
96
+ memory_id TEXT,
97
+ created_at INTEGER NOT NULL
297
98
  );
99
+ CREATE INDEX IF NOT EXISTS idx_edge_from
100
+ ON graph_edges(session_id, from_entity);
101
+ CREATE INDEX IF NOT EXISTS idx_edge_to
102
+ ON graph_edges(session_id, to_entity);
298
103
  `);
299
- // Create index for fast session lookups
300
104
  await this.db.exec(`
301
- CREATE INDEX IF NOT EXISTS idx_session_timestamp
105
+ CREATE INDEX IF NOT EXISTS idx_session_timestamp
302
106
  ON memories(session_id, timestamp DESC);
303
107
  `);
304
- // Create index for content search
305
108
  await this.db.exec(`
306
- CREATE INDEX IF NOT EXISTS idx_content
307
- ON memories(content);
109
+ CREATE INDEX IF NOT EXISTS idx_session_tier
110
+ ON memories(session_id, tier);
308
111
  `);
309
112
  this.initialized = true;
310
113
  }
114
+ async embed(text) {
115
+ try {
116
+ const response = await fetch(`${this.embeddingUrl}/api/embeddings`, {
117
+ method: 'POST',
118
+ headers: { 'Content-Type': 'application/json' },
119
+ body: JSON.stringify({ model: this.embeddingModel, prompt: text }),
120
+ signal: AbortSignal.timeout(5000)
121
+ });
122
+ if (!response.ok)
123
+ return null;
124
+ const data = await response.json();
125
+ return data.embedding ?? null;
126
+ }
127
+ catch {
128
+ return null;
129
+ }
130
+ }
131
+ async extractGraph(text) {
132
+ const prompt = `Extract entity-relationship triples from this text. Return ONLY a JSON array of objects with keys: "from", "relation", "to". Be concise. Max 5 triples. If nothing to extract, return [].
133
+
134
+ Text: "${text}"
135
+
136
+ JSON array:`;
137
+ try {
138
+ const response = await fetch(`${this.embeddingUrl}/api/generate`, {
139
+ method: 'POST',
140
+ headers: { 'Content-Type': 'application/json' },
141
+ body: JSON.stringify({
142
+ model: this.graphModel,
143
+ prompt,
144
+ stream: false,
145
+ options: { temperature: 0, num_predict: 200 }
146
+ }),
147
+ signal: AbortSignal.timeout(15000)
148
+ });
149
+ if (!response.ok)
150
+ return [];
151
+ const data = await response.json();
152
+ const raw = data.response.trim();
153
+ const match = raw.match(/\[[\s\S]*\]/);
154
+ if (!match)
155
+ return [];
156
+ const triples = JSON.parse(match[0]);
157
+ return triples
158
+ .filter(t => t.from && t.relation && t.to)
159
+ .map(t => ({
160
+ from: t.from.toLowerCase().trim(),
161
+ relation: t.relation.toLowerCase().trim(),
162
+ to: t.to.toLowerCase().trim(),
163
+ confidence: 0.9
164
+ }));
165
+ }
166
+ catch {
167
+ return [];
168
+ }
169
+ }
170
+ async upsertNode(sessionId, entity, type) {
171
+ const id = createHash('sha256').update(`${sessionId}:${entity}`).digest('hex').slice(0, 16);
172
+ await this.db.run(`INSERT OR IGNORE INTO graph_nodes (id, session_id, entity, type, created_at)
173
+ VALUES (?, ?, ?, ?, ?)`, [id, sessionId, entity, type || null, Date.now()]);
174
+ }
175
+ async storeEdge(sessionId, edge, memoryId) {
176
+ const id = createHash('sha256')
177
+ .update(`${sessionId}:${edge.from}:${edge.relation}:${edge.to}`)
178
+ .digest('hex').slice(0, 16);
179
+ await this.upsertNode(sessionId, edge.from);
180
+ await this.upsertNode(sessionId, edge.to);
181
+ await this.db.run(`INSERT OR REPLACE INTO graph_edges
182
+ (id, session_id, from_entity, relation, to_entity, confidence, memory_id, created_at)
183
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`, [id, sessionId, edge.from, edge.relation, edge.to, edge.confidence ?? 1.0, memoryId, Date.now()]);
184
+ }
185
+ cosineSimilarity(a, b) {
186
+ let dot = 0, magA = 0, magB = 0;
187
+ for (let i = 0; i < a.length; i++) {
188
+ dot += (a[i] ?? 0) * (b[i] ?? 0);
189
+ magA += (a[i] ?? 0) * (a[i] ?? 0);
190
+ magB += (b[i] ?? 0) * (b[i] ?? 0);
191
+ }
192
+ const denom = Math.sqrt(magA) * Math.sqrt(magB);
193
+ return denom === 0 ? 0 : dot / denom;
194
+ }
195
+ /**
196
+ * Call LLM to summarize a batch of memories into consolidated entries.
197
+ * Returns an array of summary strings (typically 2-5 per batch).
198
+ */
199
+ async summarizeMemories(entries, model) {
200
+ const numbered = entries
201
+ .map((e, i) => `[${i + 1}] (${e.role}) ${e.content}`)
202
+ .join('\n');
203
+ const prompt = `You are a memory consolidation system. Given these conversation memories, produce 2-5 concise summary entries that preserve all important facts: names, dates, decisions, preferences, and technical details. Each summary should be a single dense sentence or short paragraph. Return ONLY a JSON array of strings.
204
+
205
+ Memories:
206
+ ${numbered}
207
+
208
+ JSON array of summary strings:`;
209
+ try {
210
+ const response = await fetch(`${this.embeddingUrl}/api/generate`, {
211
+ method: 'POST',
212
+ headers: { 'Content-Type': 'application/json' },
213
+ body: JSON.stringify({
214
+ model,
215
+ prompt,
216
+ stream: false,
217
+ options: { temperature: 0.2, num_predict: 800 }
218
+ }),
219
+ signal: AbortSignal.timeout(60000)
220
+ });
221
+ if (!response.ok)
222
+ return [];
223
+ const data = await response.json();
224
+ const raw = data.response.trim();
225
+ const match = raw.match(/\[[\s\S]*\]/);
226
+ if (!match)
227
+ return [];
228
+ const summaries = JSON.parse(match[0]);
229
+ return summaries.filter(s => typeof s === 'string' && s.trim().length > 0);
230
+ }
231
+ catch {
232
+ return [];
233
+ }
234
+ }
311
235
  /**
312
- * Store a memory entry
236
+ * v0.4: Consolidate working memories into long-term summaries.
237
+ *
238
+ * Takes the oldest `batch` working memories (excluding the `keep` most recent),
239
+ * summarizes them via LLM, stores summaries as `long_term` tier, and archives
240
+ * the originals.
241
+ *
242
+ * @example
243
+ * ```typescript
244
+ * const result = await memory.consolidate('session_1');
245
+ * // → { summarized: 50, created: 4, archived: 50 }
246
+ *
247
+ * // Preview without writing
248
+ * const preview = await memory.consolidate('session_1', { dryRun: true });
249
+ * // → { summarized: 50, created: 0, archived: 0, previews: ['...', '...'] }
250
+ * ```
251
+ */
252
+ async consolidate(sessionId, options = {}) {
253
+ await this.init();
254
+ const batch = options.batch ?? this.consolidateBatch;
255
+ const keep = options.keep ?? this.consolidateKeep;
256
+ const model = options.model ?? this.consolidateModel;
257
+ // Fetch working memories oldest-first, excluding the N most recent
258
+ const rows = await this.db.all(`SELECT id, session_id, content, role, timestamp, metadata, tier, consolidated_from
259
+ FROM memories
260
+ WHERE session_id = ? AND tier = 'working'
261
+ ORDER BY timestamp ASC
262
+ LIMIT ?`, [sessionId, batch + keep]);
263
+ // Drop the most recent `keep` entries — leave them as working memory
264
+ const candidates = rows.slice(0, Math.max(0, rows.length - keep));
265
+ if (candidates.length === 0) {
266
+ return { summarized: 0, created: 0, archived: 0 };
267
+ }
268
+ const entries = candidates.map((row) => ({
269
+ id: row.id,
270
+ sessionId: row.session_id,
271
+ content: row.content,
272
+ role: row.role,
273
+ timestamp: new Date(row.timestamp),
274
+ tier: row.tier,
275
+ metadata: row.metadata ? JSON.parse(row.metadata) : undefined,
276
+ }));
277
+ // Get summaries from LLM
278
+ const summaries = await this.summarizeMemories(entries, model);
279
+ if (summaries.length === 0) {
280
+ return { summarized: entries.length, created: 0, archived: 0 };
281
+ }
282
+ if (options.dryRun) {
283
+ return {
284
+ summarized: entries.length,
285
+ created: 0,
286
+ archived: 0,
287
+ previews: summaries
288
+ };
289
+ }
290
+ const sourceIds = entries.map(e => e.id);
291
+ const consolidatedFromJson = JSON.stringify(sourceIds);
292
+ // Store each summary as a long_term memory
293
+ for (const summary of summaries) {
294
+ const id = createHash('sha256')
295
+ .update(`${sessionId}:lt:${summary}:${Date.now()}`)
296
+ .digest('hex').slice(0, 16);
297
+ const contentHash = createHash('sha256').update(summary).digest('hex').slice(0, 16);
298
+ // Embed the summary
299
+ let embeddingJson = null;
300
+ if (this.semanticSearch) {
301
+ const vector = await this.embed(summary);
302
+ if (vector)
303
+ embeddingJson = JSON.stringify(vector);
304
+ }
305
+ await this.db.run(`INSERT INTO memories
306
+ (id, session_id, content, role, timestamp, metadata, content_hash, embedding, tier, consolidated_from)
307
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'long_term', ?)`, [id, sessionId, summary.slice(0, this.maxContextLength), 'system',
308
+ Date.now(), null, contentHash, embeddingJson, consolidatedFromJson]);
309
+ }
310
+ // Archive the originals
311
+ const placeholders = sourceIds.map(() => '?').join(',');
312
+ await this.db.run(`UPDATE memories SET tier = 'archived' WHERE id IN (${placeholders})`, sourceIds);
313
+ return {
314
+ summarized: entries.length,
315
+ created: summaries.length,
316
+ archived: entries.length
317
+ };
318
+ }
319
+ /**
320
+ * Store a memory entry. With autoConsolidate enabled, triggers consolidation
321
+ * when working memory count exceeds the configured threshold.
313
322
  */
314
323
  async remember(sessionId, content, role = 'user', metadata) {
315
324
  await this.init();
316
- const id = (0, crypto_1.createHash)('sha256')
325
+ const id = createHash('sha256')
317
326
  .update(`${sessionId}:${content}:${Date.now()}`)
318
- .digest('hex')
319
- .slice(0, 16);
320
- const contentHash = (0, crypto_1.createHash)('sha256')
321
- .update(content)
322
- .digest('hex')
323
- .slice(0, 16);
327
+ .digest('hex').slice(0, 16);
328
+ const contentHash = createHash('sha256').update(content).digest('hex').slice(0, 16);
329
+ const truncated = content.slice(0, this.maxContextLength);
330
+ let embeddingJson = null;
331
+ if (this.semanticSearch) {
332
+ const vector = await this.embed(truncated);
333
+ if (vector)
334
+ embeddingJson = JSON.stringify(vector);
335
+ }
324
336
  const entry = {
325
- id,
326
- sessionId,
327
- content: content.slice(0, this.maxContextLength),
328
- role,
329
- timestamp: new Date(),
330
- metadata
337
+ id, sessionId, content: truncated, role,
338
+ timestamp: new Date(), tier: 'working',
339
+ ...(metadata !== undefined && { metadata })
331
340
  };
332
- await this.db.run(`INSERT INTO memories (id, session_id, content, role, timestamp, metadata, content_hash)
333
- VALUES (?, ?, ?, ?, ?, ?, ?)`, [
334
- entry.id,
335
- entry.sessionId,
336
- entry.content,
337
- entry.role,
338
- entry.timestamp.getTime(),
339
- metadata ? JSON.stringify(metadata) : null,
340
- contentHash
341
- ]);
341
+ await this.db.run(`INSERT INTO memories
342
+ (id, session_id, content, role, timestamp, metadata, content_hash, embedding, tier)
343
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, 'working')`, [id, sessionId, truncated, role, entry.timestamp.getTime(),
344
+ metadata ? JSON.stringify(metadata) : null, contentHash, embeddingJson]);
345
+ // v0.3: Extract graph relationships
346
+ if (this.graphMemory) {
347
+ const edges = await this.extractGraph(truncated);
348
+ for (const edge of edges) {
349
+ await this.storeEdge(sessionId, edge, id);
350
+ }
351
+ }
352
+ // v0.4: Auto-consolidate if threshold exceeded
353
+ if (this.autoConsolidate) {
354
+ const countRow = await this.db.get(`SELECT COUNT(*) as count FROM memories WHERE session_id = ? AND tier = 'working'`, [sessionId]);
355
+ if ((countRow?.count ?? 0) > this.consolidateThreshold) {
356
+ // Fire-and-forget — don't block the caller
357
+ this.consolidate(sessionId).catch(() => { });
358
+ }
359
+ }
342
360
  return entry;
343
361
  }
344
362
  /**
345
- * Recall memories for a session
346
- *
347
- * Supports temporal queries via options.temporalQuery:
348
- * - 'yesterday', 'today', 'tomorrow'
349
- * - '3 days ago', 'a week ago', '2 weeks ago'
350
- * - 'last monday', 'last week', 'last month'
351
- * - 'this week', 'this month'
352
- * - 'last 3 days', 'last week'
353
- * - 'january 15', '3/15', '2024-01-15'
354
- * - 'jan 15 to jan 20', '3/1 to 3/15'
363
+ * Recall memories. Searches working and long_term tiers by default.
364
+ * Archived memories (consolidated originals) are excluded unless explicitly requested.
355
365
  */
356
366
  async recall(sessionId, query, limit = 10, options = {}) {
357
367
  await this.init();
358
- // Handle temporal query if provided
359
- if (options.temporalQuery) {
360
- const temporal = new TemporalQuery(new Date(), options.timezoneOffset);
361
- const range = temporal.parse(options.temporalQuery);
362
- if (range) {
363
- options.after = range.start;
364
- options.before = range.end;
365
- }
366
- }
368
+ const tiers = options.tiers ?? ['working', 'long_term'];
369
+ const tierPlaceholders = tiers.map(() => '?').join(',');
367
370
  let sql = `
368
- SELECT id, session_id, content, role, timestamp, metadata
369
- FROM memories
370
- WHERE session_id = ?
371
+ SELECT id, session_id, content, role, timestamp, metadata, embedding, tier, consolidated_from
372
+ FROM memories WHERE session_id = ? AND tier IN (${tierPlaceholders})
371
373
  `;
372
- const params = [sessionId];
374
+ const params = [sessionId, ...tiers];
373
375
  if (options.role) {
374
376
  sql += ` AND role = ?`;
375
377
  params.push(options.role);
@@ -382,7 +384,49 @@ class Engram {
382
384
  sql += ` AND timestamp <= ?`;
383
385
  params.push(options.before.getTime());
384
386
  }
385
- // Simple keyword matching if query provided
387
+ const mapRow = (row, similarity) => {
388
+ const entry = {
389
+ id: row.id,
390
+ sessionId: row.session_id,
391
+ content: row.content,
392
+ role: row.role,
393
+ timestamp: new Date(row.timestamp),
394
+ tier: row.tier,
395
+ };
396
+ if (row.consolidated_from)
397
+ entry.consolidatedFrom = JSON.parse(row.consolidated_from);
398
+ if (row.metadata)
399
+ entry.metadata = JSON.parse(row.metadata);
400
+ if (similarity !== undefined)
401
+ entry.similarity = similarity;
402
+ return entry;
403
+ };
404
+ // Semantic search
405
+ if (query && query.trim() && this.semanticSearch) {
406
+ const queryVector = await this.embed(query);
407
+ if (queryVector) {
408
+ const rows = await this.db.all(sql + ` ORDER BY timestamp DESC`, params);
409
+ const scored = rows
410
+ .map((row) => {
411
+ let similarity = 0;
412
+ if (row.embedding) {
413
+ try {
414
+ similarity = this.cosineSimilarity(queryVector, JSON.parse(row.embedding));
415
+ }
416
+ catch { /* skip */ }
417
+ }
418
+ return { row, similarity };
419
+ })
420
+ .sort((a, b) => b.similarity - a.similarity)
421
+ .slice(0, limit);
422
+ const results = scored.map(({ row, similarity }) => mapRow(row, similarity));
423
+ if (this.graphMemory && options.includeGraph !== false) {
424
+ return this.augmentWithGraph(sessionId, results, limit);
425
+ }
426
+ return results;
427
+ }
428
+ }
429
+ // Keyword fallback
386
430
  if (query && query.trim()) {
387
431
  const keywords = query.toLowerCase().split(/\s+/).filter(k => k.length > 2);
388
432
  if (keywords.length > 0) {
@@ -393,127 +437,99 @@ class Engram {
393
437
  sql += ` ORDER BY timestamp DESC LIMIT ?`;
394
438
  params.push(limit);
395
439
  const rows = await this.db.all(sql, params);
396
- return rows.map((row) => ({
440
+ return rows.map((row) => mapRow(row));
441
+ }
442
+ async augmentWithGraph(sessionId, results, limit) {
443
+ const seenIds = new Set(results.map(r => r.id));
444
+ const graphMemoryIds = new Set();
445
+ for (const result of results.slice(0, 3)) {
446
+ const edges = await this.db.all(`SELECT memory_id FROM graph_edges WHERE session_id = ? AND memory_id IS NOT NULL
447
+ AND (from_entity IN (
448
+ SELECT from_entity FROM graph_edges WHERE memory_id = ?
449
+ UNION SELECT to_entity FROM graph_edges WHERE memory_id = ?
450
+ ))
451
+ LIMIT 5`, [sessionId, result.id, result.id]);
452
+ for (const edge of edges) {
453
+ if (edge.memory_id && !seenIds.has(edge.memory_id)) {
454
+ graphMemoryIds.add(edge.memory_id);
455
+ }
456
+ }
457
+ }
458
+ if (graphMemoryIds.size === 0)
459
+ return results;
460
+ const placeholders = Array.from(graphMemoryIds).map(() => '?').join(',');
461
+ const connectedRows = await this.db.all(`SELECT id, session_id, content, role, timestamp, metadata, tier, consolidated_from
462
+ FROM memories WHERE id IN (${placeholders})`, Array.from(graphMemoryIds));
463
+ const connected = connectedRows.map((row) => ({
397
464
  id: row.id,
398
465
  sessionId: row.session_id,
399
466
  content: row.content,
400
467
  role: row.role,
401
468
  timestamp: new Date(row.timestamp),
402
- metadata: row.metadata ? JSON.parse(row.metadata) : undefined
469
+ tier: row.tier,
470
+ consolidatedFrom: row.consolidated_from ? JSON.parse(row.consolidated_from) : undefined,
471
+ metadata: row.metadata ? JSON.parse(row.metadata) : undefined,
472
+ similarity: 0
403
473
  }));
474
+ return [...results, ...connected].slice(0, limit);
404
475
  }
405
- /**
406
- * Recall memories by natural language time expression
407
- *
408
- * @example
409
- * ```typescript
410
- * // Get yesterday's memories
411
- * const yesterday = await memory.recallByTime('session_123', 'yesterday');
412
- *
413
- * // Get last week's memories
414
- * const lastWeek = await memory.recallByTime('session_123', 'last week');
415
- *
416
- * // Get memories from 3 days ago
417
- * const threeDaysAgo = await memory.recallByTime('session_123', '3 days ago');
418
- * ```
419
- */
420
- async recallByTime(sessionId, temporalQuery, query, limit = 50, options = {}) {
421
- const temporal = new TemporalQuery(new Date(), options.timezoneOffset);
422
- const range = temporal.parse(temporalQuery);
423
- if (!range) {
424
- throw new Error(`Unable to parse temporal query: "${temporalQuery}"`);
425
- }
426
- const entries = await this.recall(sessionId, query, limit, {
427
- ...options,
428
- after: range.start,
429
- before: range.end
430
- });
431
- return { entries, range };
432
- }
433
- /**
434
- * Get memories from the last N days
435
- */
436
- async recallRecent(sessionId, days = 7, query, limit = 50, options = {}) {
437
- const since = new Date();
438
- since.setDate(since.getDate() - days);
439
- since.setHours(0, 0, 0, 0);
440
- const entries = await this.recall(sessionId, query, limit, {
441
- ...options,
442
- after: since
443
- });
444
- return { entries, days, since };
445
- }
446
- /**
447
- * Get memories since a specific date
448
- */
449
- async recallSince(sessionId, since, query, limit = 50, options = {}) {
450
- const entries = await this.recall(sessionId, query, limit, {
451
- ...options,
452
- after: since
453
- });
454
- return { entries, since, count: entries.length };
455
- }
456
- /**
457
- * Get memories between two dates
458
- */
459
- async recallBetween(sessionId, start, end, query, limit = 50, options = {}) {
460
- const entries = await this.recall(sessionId, query, limit, {
461
- ...options,
462
- after: start,
463
- before: end
464
- });
465
- return { entries, start, end, count: entries.length };
466
- }
467
- /**
468
- * Get a daily summary of memories
469
- *
470
- * Returns memories grouped by day, useful for "what happened each day" views
471
- */
472
- async dailySummary(sessionId, days = 7) {
476
+ async graph(sessionId, entity) {
473
477
  await this.init();
474
- const since = new Date();
475
- since.setDate(since.getDate() - days);
476
- since.setHours(0, 0, 0, 0);
477
- const entries = await this.recall(sessionId, undefined, 1000, { after: since });
478
- // Group by day
479
- const grouped = new Map();
480
- for (const entry of entries) {
481
- const dateKey = entry.timestamp.toISOString().split('T')[0];
482
- if (!grouped.has(dateKey)) {
483
- grouped.set(dateKey, []);
484
- }
485
- grouped.get(dateKey).push(entry);
478
+ const ent = entity.toLowerCase().trim();
479
+ const outgoing = await this.db.all(`SELECT relation, to_entity, confidence, memory_id FROM graph_edges
480
+ WHERE session_id = ? AND from_entity = ?`, [sessionId, ent]);
481
+ const incoming = await this.db.all(`SELECT relation, from_entity, confidence, memory_id FROM graph_edges
482
+ WHERE session_id = ? AND to_entity = ?`, [sessionId, ent]);
483
+ const relationships = [
484
+ ...outgoing.map((e) => ({
485
+ type: 'outgoing',
486
+ relation: e.relation,
487
+ target: e.to_entity,
488
+ confidence: e.confidence
489
+ })),
490
+ ...incoming.map((e) => ({
491
+ type: 'incoming',
492
+ relation: e.relation,
493
+ target: e.from_entity,
494
+ confidence: e.confidence
495
+ }))
496
+ ];
497
+ const memoryIds = [
498
+ ...outgoing.map((e) => e.memory_id),
499
+ ...incoming.map((e) => e.memory_id)
500
+ ].filter(Boolean);
501
+ let relatedMemories = [];
502
+ if (memoryIds.length > 0) {
503
+ const placeholders = memoryIds.map(() => '?').join(',');
504
+ const rows = await this.db.all(`SELECT id, session_id, content, role, timestamp, metadata, tier, consolidated_from
505
+ FROM memories WHERE id IN (${placeholders})`, memoryIds);
506
+ relatedMemories = rows.map((row) => ({
507
+ id: row.id,
508
+ sessionId: row.session_id,
509
+ content: row.content,
510
+ role: row.role,
511
+ timestamp: new Date(row.timestamp),
512
+ tier: row.tier,
513
+ consolidatedFrom: row.consolidated_from ? JSON.parse(row.consolidated_from) : undefined,
514
+ metadata: row.metadata ? JSON.parse(row.metadata) : undefined
515
+ }));
486
516
  }
487
- // Convert to sorted array
488
- return Array.from(grouped.entries())
489
- .sort((a, b) => b[0].localeCompare(a[0])) // Descending date order
490
- .map(([dateKey, dayEntries]) => ({
491
- date: new Date(dateKey),
492
- entries: dayEntries.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime()),
493
- count: dayEntries.length
494
- }));
517
+ return { entity: ent, relationships, relatedMemories };
495
518
  }
496
- /**
497
- * Get recent conversation history for a session
498
- */
499
519
  async history(sessionId, limit = 20) {
500
520
  return this.recall(sessionId, undefined, limit, {});
501
521
  }
502
- /**
503
- * Forget (delete) memories
504
- */
505
522
  async forget(sessionId, options) {
506
523
  await this.init();
524
+ const tiers = options?.includeLongTerm
525
+ ? `('working', 'long_term', 'archived')`
526
+ : `('working', 'long_term')`;
507
527
  if (options?.id) {
508
- const result = await this.db.run('DELETE FROM memories WHERE session_id = ? AND id = ?', [sessionId, options.id]);
528
+ const result = await this.db.run(`DELETE FROM memories WHERE session_id = ? AND id = ?`, [sessionId, options.id]);
509
529
  return result.changes || 0;
510
530
  }
511
- let sql = 'DELETE FROM memories WHERE session_id = ?';
531
+ let sql = `DELETE FROM memories WHERE session_id = ? AND tier IN ${tiers}`;
512
532
  const params = [sessionId];
513
- if (options?.after) {
514
- sql += ' AND timestamp >= ?';
515
- params.push(options.after.getTime());
516
- }
517
533
  if (options?.before) {
518
534
  sql += ' AND timestamp < ?';
519
535
  params.push(options.before.getTime());
@@ -521,60 +537,35 @@ class Engram {
521
537
  const result = await this.db.run(sql, params);
522
538
  return result.changes || 0;
523
539
  }
524
- /**
525
- * Get memory statistics for a session
526
- */
527
540
  async stats(sessionId) {
528
541
  await this.init();
529
- const totalRow = await this.db.get('SELECT COUNT(*) as count FROM memories WHERE session_id = ?', [sessionId]);
530
- const total = totalRow?.count || 0;
531
- const roleRows = await this.db.all('SELECT role, COUNT(*) as count FROM memories WHERE session_id = ? GROUP BY role', [sessionId]);
542
+ const totalRow = await this.db.get(`SELECT COUNT(*) as count FROM memories WHERE session_id = ? AND tier != 'archived'`, [sessionId]);
543
+ const roleRows = await this.db.all(`SELECT role, COUNT(*) as count FROM memories WHERE session_id = ? AND tier != 'archived' GROUP BY role`, [sessionId]);
544
+ const tierRows = await this.db.all(`SELECT tier, COUNT(*) as count FROM memories WHERE session_id = ? GROUP BY tier`, [sessionId]);
532
545
  const byRole = {};
533
- roleRows.forEach((row) => {
534
- byRole[row.role] = row.count;
535
- });
536
- const range = await this.db.get('SELECT MIN(timestamp) as oldest, MAX(timestamp) as newest FROM memories WHERE session_id = ?', [sessionId]);
537
- return {
538
- total,
546
+ roleRows.forEach((row) => { byRole[row.role] = row.count; });
547
+ const byTier = { working: 0, long_term: 0, archived: 0 };
548
+ tierRows.forEach((row) => { byTier[row.tier] = row.count; });
549
+ const range = await this.db.get(`SELECT MIN(timestamp) as oldest, MAX(timestamp) as newest
550
+ FROM memories WHERE session_id = ? AND tier != 'archived'`, [sessionId]);
551
+ const embRow = await this.db.get(`SELECT COUNT(*) as count FROM memories
552
+ WHERE session_id = ? AND tier != 'archived' AND embedding IS NOT NULL`, [sessionId]);
553
+ const result = {
554
+ total: totalRow?.count || 0,
539
555
  byRole,
556
+ byTier,
540
557
  oldest: range?.oldest ? new Date(range.oldest) : null,
541
- newest: range?.newest ? new Date(range.newest) : null
558
+ newest: range?.newest ? new Date(range.newest) : null,
559
+ withEmbeddings: embRow?.count || 0,
542
560
  };
543
- }
544
- /**
545
- * Get temporal statistics for a session
546
- *
547
- * Returns memory counts grouped by day, useful for activity visualization
548
- */
549
- async temporalStats(sessionId, days = 30) {
550
- await this.init();
551
- const since = new Date();
552
- since.setDate(since.getDate() - days);
553
- const rows = await this.db.all(`SELECT
554
- date(timestamp / 1000, 'unixepoch', 'localtime') as date,
555
- role,
556
- COUNT(*) as count
557
- FROM memories
558
- WHERE session_id = ? AND timestamp >= ?
559
- GROUP BY date, role
560
- ORDER BY date DESC`, [sessionId, since.getTime()]);
561
- // Aggregate by date
562
- const byDate = new Map();
563
- for (const row of rows) {
564
- if (!byDate.has(row.date)) {
565
- byDate.set(row.date, { count: 0, byRole: {} });
566
- }
567
- const day = byDate.get(row.date);
568
- day.count += row.count;
569
- day.byRole[row.role] = row.count;
561
+ if (this.graphMemory) {
562
+ const nodeRow = await this.db.get(`SELECT COUNT(*) as count FROM graph_nodes WHERE session_id = ?`, [sessionId]);
563
+ const edgeRow = await this.db.get(`SELECT COUNT(*) as count FROM graph_edges WHERE session_id = ?`, [sessionId]);
564
+ result.graphNodes = nodeRow?.count || 0;
565
+ result.graphEdges = edgeRow?.count || 0;
570
566
  }
571
- return Array.from(byDate.entries())
572
- .sort((a, b) => b[0].localeCompare(a[0]))
573
- .map(([date, stats]) => ({ date, ...stats }));
567
+ return result;
574
568
  }
575
- /**
576
- * Close the database connection
577
- */
578
569
  async close() {
579
570
  if (this.db) {
580
571
  await this.db.close();
@@ -582,6 +573,5 @@ class Engram {
582
573
  }
583
574
  }
584
575
  }
585
- exports.Engram = Engram;
586
- exports.default = Engram;
576
+ export default Engram;
587
577
  //# sourceMappingURL=index.js.map