@jeremiaheth/neolata-mem 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 Jeremiaheth
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,73 @@
1
+ # @jeremiaheth/neolata-mem
2
+
3
+ A-MEM style memory graph engine for AI agents. Semantic auto-linking, decay, conflict resolution, and cross-agent traversal.
4
+
5
+ ## Install
6
+
7
+ ```bash
8
+ npm install @jeremiaheth/neolata-mem
9
+ ```
10
+
11
+ ## Quick Start
12
+
13
+ ```js
14
+ import { MemoryGraph, NvidiaEmbeddingProvider, JsonFileStorage, LLMExtractionProvider } from '@jeremiaheth/neolata-mem';
15
+
16
+ const graph = new MemoryGraph({
17
+ embedding: new NvidiaEmbeddingProvider({ apiKey: process.env.NVIDIA_API_KEY }),
18
+ storage: new JsonFileStorage('./memory-data'),
19
+ extraction: new LLMExtractionProvider({
20
+ baseUrl: 'http://localhost:18789/v1/chat/completions',
21
+ apiKey: process.env.OPENCLAW_TOKEN,
22
+ model: 'anthropic/claude-haiku-4-5'
23
+ })
24
+ });
25
+
26
+ // Store with auto-linking
27
+ const result = await graph.addWithLinking('agent-1', 'Port 443 is open on target', 'finding', 0.8, ['recon']);
28
+
29
+ // Semantic search
30
+ const results = await graph.search('open ports', 'agent-1');
31
+
32
+ // Evolve with conflict resolution
33
+ await graph.evolve('agent-1', 'Port 443 is closed on target', 'finding', 0.9, ['recon']);
34
+
35
+ // Graph health
36
+ const health = await graph.healthReport();
37
+ ```
38
+
39
+ ## Providers
40
+
41
+ ### Embedding
42
+ - `NvidiaEmbeddingProvider` — NVIDIA NIM (baai/bge-m3)
43
+ - `OpenAIEmbeddingProvider` — OpenAI or compatible APIs
44
+
45
+ ### Storage
46
+ - `JsonFileStorage` — local JSON files
47
+ - `SupabaseStorage` — Supabase REST API
48
+
49
+ ### Extraction
50
+ - `LLMExtractionProvider` — any OpenAI-compatible chat API
51
+ - `PassthroughExtractionProvider` — no LLM, wraps text as-is
52
+
53
+ ## API
54
+
55
+ | Method | Description |
56
+ |--------|-------------|
57
+ | `addWithLinking(agent, memory, category, importance, tags)` | Store with A-MEM auto-linking |
58
+ | `search(query, agent?, limit?)` | Semantic search |
59
+ | `evolve(agent, memory, category, importance, tags)` | Store with conflict resolution |
60
+ | `detectConflicts(agent, text)` | Check for contradictions |
61
+ | `decay(dryRun?)` | Run decay cycle |
62
+ | `reinforce(memoryId, boost?)` | Boost a memory |
63
+ | `traverse(startId, maxHops?)` | Multi-hop graph walk |
64
+ | `findClusters(minSize?)` | Detect connected components |
65
+ | `findPath(idA, idB)` | Shortest path |
66
+ | `findOrphans(agent?, maxLinks?)` | Unlinked memories |
67
+ | `timeline(agent?, days?)` | Timeline view |
68
+ | `healthReport()` | Full health report |
69
+ | `generateContext(agent, query, maxMemories?)` | LLM-ready context block |
70
+
71
+ ## License
72
+
73
+ MIT
package/package.json ADDED
@@ -0,0 +1,36 @@
1
+ {
2
+ "name": "@jeremiaheth/neolata-mem",
3
+ "version": "0.1.0",
4
+ "description": "A-MEM style memory graph engine with semantic linking, decay, conflict resolution, and cross-agent traversal",
5
+ "type": "module",
6
+ "main": "src/index.mjs",
7
+ "exports": {
8
+ ".": "./src/index.mjs",
9
+ "./providers/*": "./src/providers/*.mjs"
10
+ },
11
+ "files": [
12
+ "src/",
13
+ "LICENSE",
14
+ "README.md"
15
+ ],
16
+ "keywords": [
17
+ "memory-graph",
18
+ "a-mem",
19
+ "zettelkasten",
20
+ "semantic-search",
21
+ "embeddings",
22
+ "knowledge-graph",
23
+ "agent-memory",
24
+ "decay",
25
+ "conflict-resolution"
26
+ ],
27
+ "author": "Jeremiaheth",
28
+ "license": "MIT",
29
+ "repository": {
30
+ "type": "git",
31
+ "url": "https://github.com/Jeremiaheth/neolata-mem"
32
+ },
33
+ "engines": {
34
+ "node": ">=18.0.0"
35
+ }
36
+ }
package/src/index.mjs ADDED
@@ -0,0 +1,464 @@
1
+ /**
2
+ * @jeremiaheth/neolata-mem
3
+ *
4
+ * A-MEM style memory graph engine with:
5
+ * - Semantic auto-linking (Zettelkasten)
6
+ * - Multi-agent support
7
+ * - Memory decay (exponential with reinforcement)
8
+ * - Conflict resolution & evolution
9
+ * - Graph traversal, clustering, pathfinding
10
+ * - Context generation for LLM prompts
11
+ *
12
+ * @license MIT
13
+ */
14
+
15
+ export { NvidiaEmbeddingProvider, OpenAIEmbeddingProvider } from './providers/embedding.mjs';
16
+ export { JsonFileStorage, SupabaseStorage } from './providers/storage.mjs';
17
+ export { LLMExtractionProvider, PassthroughExtractionProvider } from './providers/extraction.mjs';
18
+
19
+ // ============================================================
20
+ // UTILS
21
+ // ============================================================
22
+
23
/**
 * Cosine similarity between two equal-length numeric vectors.
 *
 * Returns 0 instead of NaN when either vector has zero magnitude
 * (the original divided by zero, and a NaN score silently corrupts
 * every downstream similarity sort).
 *
 * @param {number[]} a
 * @param {number[]} b
 * @returns {number} similarity in [-1, 1]; 0 for a zero vector
 */
function cosineSimilarity(a, b) {
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  const denom = Math.sqrt(normA) * Math.sqrt(normB);
  return denom === 0 ? 0 : dot / denom;
}
32
+
33
/**
 * Generate a unique-ish memory id: `mem_<epoch-ms>_<random base36 suffix>`.
 * Not cryptographically secure — ids are opaque handles, not secrets.
 */
function genId() {
  const stamp = Date.now();
  const suffix = Math.random().toString(36).slice(2, 8);
  return `mem_${stamp}_${suffix}`;
}
36
+
37
+ // ============================================================
38
+ // MEMORY GRAPH
39
+ // ============================================================
40
+
41
export class MemoryGraph {
  /**
   * @param {Object} opts
   * @param {import('./providers/embedding.mjs').EmbeddingProvider} opts.embedding
   * @param {import('./providers/storage.mjs').StorageProvider} opts.storage
   * @param {import('./providers/extraction.mjs').ExtractionProvider} [opts.extraction]
   * @param {Object} [opts.config]
   * @param {number} [opts.config.linkThreshold=0.5] — min cosine similarity for auto-linking
   * @param {number} [opts.config.maxLinks=5] — max links per memory
   * @param {number} [opts.config.decayHalfLifeDays=30]
   * @param {number} [opts.config.archiveThreshold=0.15]
   * @param {number} [opts.config.deleteThreshold=0.05]
   */
  constructor({ embedding, storage, extraction, config = {} } = {}) {
    if (!embedding) throw new Error('embedding provider is required');
    if (!storage) throw new Error('storage provider is required');
    this.embedding = embedding;
    this.storage = storage;
    this.extraction = extraction;
    this.config = {
      linkThreshold: 0.5,
      maxLinks: 5,
      decayHalfLifeDays: 30,
      archiveThreshold: 0.15,
      deleteThreshold: 0.05,
      ...config
    };
    this.memories = [];
    this.loaded = false;
  }

  /** Lazily load memories from storage. Idempotent. */
  async init() {
    if (this.loaded) return;
    this.memories = await this.storage.load();
    this.loaded = true;
  }

  /** Persist the in-memory graph through the storage provider. */
  async save() {
    await this.storage.save(this.memories);
  }

  /**
   * Build an O(1) id → memory lookup. Rebuilt per operation because the
   * graph is mutable; replaces the repeated O(n) `find` scans the original
   * ran inside loops (accidental O(n²)).
   */
  _index() {
    return new Map(this.memories.map(m => [m.id, m]));
  }

  // ── Core: Add with A-MEM linking ─────────────────────────

  /**
   * Store a memory and auto-link it (bidirectionally) to the most similar
   * existing memories above `config.linkThreshold`, capped at `config.maxLinks`.
   * @returns {Promise<{id: string, links: number, topLink: string}>}
   */
  async addWithLinking(agent, memory, category, importance, tags) {
    await this.init();
    const [emb] = await this.embedding.embed(memory);

    const related = this.memories
      .filter(m => m.embedding)
      .map(m => ({ id: m.id, agent: m.agent, sim: cosineSimilarity(emb, m.embedding) }))
      .filter(m => m.sim > this.config.linkThreshold)
      .sort((a, b) => b.sim - a.sim)
      .slice(0, this.config.maxLinks);

    const id = genId();
    const now = new Date().toISOString();

    this.memories.push({
      id, agent, memory, category, importance,
      tags: tags || [],
      embedding: emb,
      links: related.map(l => ({ id: l.id, similarity: l.sim })),
      created_at: now,
      updated_at: now
    });

    // Mirror each new link on the target memory (backlinks).
    const byId = this._index();
    for (const link of related) {
      const target = byId.get(link.id);
      if (!target) continue;
      if (!target.links) target.links = [];
      if (!target.links.some(l => l.id === id)) {
        target.links.push({ id, similarity: link.sim });
      }
      target.updated_at = now;
    }

    await this.save();
    return {
      id,
      links: related.length,
      topLink: related[0]
        ? `${related[0].id} (${(related[0].sim * 100).toFixed(1)}%, agent: ${related[0].agent})`
        : 'none'
    };
  }

  // ── Search ───────────────────────────────────────────────

  /**
   * Semantic search over memories (optionally one agent's), ranked by
   * cosine similarity. Results omit the raw embedding vector.
   */
  async search(query, agent = null, limit = 10) {
    await this.init();
    const [qEmb] = await this.embedding.embed(query);
    const candidates = agent ? this.memories.filter(m => m.agent === agent) : this.memories;

    return candidates
      .filter(m => m.embedding)
      .map(({ embedding, ...rest }) => ({ ...rest, score: cosineSimilarity(qEmb, embedding) }))
      .sort((a, b) => b.score - a.score)
      .slice(0, limit);
  }

  // ── Links ────────────────────────────────────────────────

  /**
   * Resolve a memory's outgoing links to their target records.
   * @returns {Promise<Object|null>} null when the id is unknown
   */
  async getLinks(memoryId) {
    await this.init();
    const byId = this._index();
    const mem = byId.get(memoryId);
    if (!mem) return null;
    return {
      memory: mem.memory,
      agent: mem.agent,
      links: (mem.links || []).map(link => {
        const target = byId.get(link.id);
        return {
          id: link.id,
          similarity: link.similarity,
          memory: target?.memory || '(deleted)',
          agent: target?.agent || '?',
          category: target?.category || '?'
        };
      })
    };
  }

  // ── Decay ────────────────────────────────────────────────

  /**
   * Current retention strength for a memory: exponential decay on age and
   * staleness (half-life from config, each factor floored at 0.1), boosted
   * by link count and access count, weighted by category, clamped at 1.0.
   */
  calcStrength(mem) {
    const now = Date.now();
    const ageDays = (now - new Date(mem.created_at).getTime()) / 864e5;
    const lastTouchDays = (now - new Date(mem.updated_at || mem.created_at).getTime()) / 864e5;
    const base = mem.importance || 0.5;
    const HL = this.config.decayHalfLifeDays;

    const ageFactor = Math.max(0.1, Math.pow(0.5, ageDays / HL));
    const touchFactor = Math.max(0.1, Math.pow(0.5, lastTouchDays / (HL * 2)));
    const linkBonus = Math.min(0.3, (mem.links || []).length * 0.05);
    const accessBonus = Math.min(0.2, (mem.accessCount || 0) * 0.02);
    const categoryWeight = { decision: 1.3, preference: 1.4, insight: 1.1 }[mem.category] || 1.0;

    const strength = Math.min(1.0, (base * ageFactor * touchFactor * categoryWeight) + linkBonus + accessBonus);
    return { strength, ageDays, lastTouchDays, linkCount: (mem.links || []).length, base, ageFactor, touchFactor, categoryWeight };
  }

  /**
   * Run a decay cycle: memories below `archiveThreshold` move to the archive;
   * memories below `deleteThreshold` are dropped permanently (NOT archived).
   * Dangling links to removed memories are pruned. `dryRun` only reports.
   */
  async decay(dryRun = false) {
    await this.init();
    const { archiveThreshold: AT, deleteThreshold: DT } = this.config;
    const report = { total: this.memories.length, healthy: 0, weakening: 0, archived: [], deleted: [], linksClean: 0 };
    const toArchive = [], toDelete = [];

    for (const mem of this.memories) {
      const { strength } = this.calcStrength(mem);
      const summary = { id: mem.id, memory: mem.memory.slice(0, 80), strength: strength.toFixed(3), agent: mem.agent };
      if (strength < DT) { toDelete.push(mem); report.deleted.push(summary); }
      else if (strength < AT) { toArchive.push(mem); report.archived.push(summary); }
      else if (strength < 0.3) report.weakening++;
      else report.healthy++;
    }

    if (!dryRun && (toArchive.length || toDelete.length)) {
      const archived = await this.storage.loadArchive();
      const stamp = new Date().toISOString();
      for (const mem of toArchive) { mem.archived_at = stamp; archived.push(mem); }
      await this.storage.saveArchive(archived);

      const removeIds = new Set([...toArchive, ...toDelete].map(m => m.id));
      this.memories = this.memories.filter(m => !removeIds.has(m.id));
      for (const mem of this.memories) {
        const before = (mem.links || []).length;
        mem.links = (mem.links || []).filter(l => !removeIds.has(l.id));
        report.linksClean += before - mem.links.length;
      }
      await this.save();
    }
    return report;
  }

  // ── Reinforce ────────────────────────────────────────────

  /**
   * Boost a memory's importance (capped at 1.0) and record an access.
   * @returns {Promise<Object|null>} summary, or null when the id is unknown
   */
  async reinforce(memoryId, boost = 0.1) {
    await this.init();
    const mem = this.memories.find(m => m.id === memoryId);
    if (!mem) return null;
    const oldImportance = mem.importance;
    mem.importance = Math.min(1.0, (mem.importance || 0.5) + boost);
    mem.accessCount = (mem.accessCount || 0) + 1;
    mem.updated_at = new Date().toISOString();
    await this.save();
    const { strength } = this.calcStrength(mem);
    return { id: mem.id, memory: mem.memory, oldImportance, newImportance: mem.importance, accessCount: mem.accessCount, strength: strength.toFixed(3) };
  }

  // ── Graph traversal ─────────────────────────────────────

  /**
   * BFS out from a memory up to `maxHops` link hops.
   * @returns {Promise<Object|null>} null when startId is unknown
   */
  async traverse(startId, maxHops = 2) {
    await this.init();
    const byId = this._index();
    const start = byId.get(startId);
    if (!start) return null;
    const visited = new Map();
    const queue = [{ id: startId, hop: 0, similarity: 1.0 }];

    while (queue.length) {
      const { id, hop, similarity } = queue.shift();
      if (visited.has(id)) continue;
      const mem = byId.get(id);
      if (!mem) continue;
      visited.set(id, { hop, memory: mem.memory, agent: mem.agent, category: mem.category, importance: mem.importance, similarity, linkCount: (mem.links || []).length });
      if (hop < maxHops) {
        for (const link of (mem.links || [])) {
          if (!visited.has(link.id)) queue.push({ id: link.id, hop: hop + 1, similarity: link.similarity });
        }
      }
    }

    return {
      start: { id: startId, memory: start.memory, agent: start.agent },
      hops: maxHops,
      reached: visited.size,
      nodes: [...visited.entries()]
        .map(([id, data]) => ({ id, ...data }))
        .sort((a, b) => a.hop - b.hop || b.similarity - a.similarity)
    };
  }

  // ── Clusters ─────────────────────────────────────────────

  /**
   * Connected components of the link graph (BFS), with per-cluster agent
   * counts and top-5 tags; only components of at least `minSize`.
   */
  async findClusters(minSize = 2) {
    await this.init();
    const byId = this._index();
    const visited = new Set();
    const clusters = [];

    for (const seed of this.memories) {
      if (visited.has(seed.id)) continue;
      const component = [];
      const queue = [seed.id];
      while (queue.length) {
        const id = queue.shift();
        if (visited.has(id)) continue;
        visited.add(id);
        const m = byId.get(id);
        if (!m) continue;
        component.push(m);
        for (const link of (m.links || [])) if (!visited.has(link.id)) queue.push(link.id);
      }
      if (component.length < minSize) continue;

      const tagCounts = {}, agentCounts = {};
      for (const m of component) {
        agentCounts[m.agent] = (agentCounts[m.agent] || 0) + 1;
        for (const tag of (m.tags || [])) tagCounts[tag] = (tagCounts[tag] || 0) + 1;
      }
      clusters.push({
        size: component.length,
        agents: agentCounts,
        topTags: Object.entries(tagCounts).sort((a, b) => b[1] - a[1]).slice(0, 5).map(([tag]) => tag),
        memories: component.map(m => ({ id: m.id, memory: m.memory, agent: m.agent, category: m.category, importance: m.importance }))
      });
    }
    return clusters.sort((a, b) => b.size - a.size);
  }

  // ── Shortest path ────────────────────────────────────────

  /**
   * Unweighted shortest path between two memories over links (BFS with a
   * parent map). Returns null when either id is unknown.
   */
  async findPath(idA, idB) {
    await this.init();
    const byId = this._index();
    if (!byId.has(idA) || !byId.has(idB)) return null;
    const parent = new Map([[idA, null]]);
    const queue = [idA];

    while (queue.length) {
      const id = queue.shift();
      if (id === idB) {
        const path = [];
        for (let cur = idB; cur !== null; cur = parent.get(cur)) {
          const mem = byId.get(cur);
          path.unshift({ id: cur, memory: mem?.memory || '?', agent: mem?.agent || '?', category: mem?.category || '?' });
        }
        return { found: true, hops: path.length - 1, path };
      }
      const mem = byId.get(id);
      for (const link of (mem?.links || [])) {
        if (!parent.has(link.id)) { parent.set(link.id, id); queue.push(link.id); }
      }
    }
    return { found: false, hops: -1, path: [] };
  }

  // ── Orphans ──────────────────────────────────────────────

  /** Memories with at most `maxLinks` links, weakest-strength first. */
  async findOrphans(agent = null, maxLinks = 0) {
    await this.init();
    const candidates = agent ? this.memories.filter(m => m.agent === agent) : this.memories;
    return candidates
      .filter(m => (m.links || []).length <= maxLinks)
      .map(m => {
        const { strength, ageDays } = this.calcStrength(m);
        return { id: m.id, memory: m.memory, agent: m.agent, category: m.category, importance: m.importance, links: (m.links || []).length, strength: strength.toFixed(3), ageDays: ageDays.toFixed(1) };
      })
      .sort((a, b) => parseFloat(a.strength) - parseFloat(b.strength));
  }

  // ── Timeline ─────────────────────────────────────────────

  /** Group memories from the last `days` days by creation date (UTC date part). */
  async timeline(agent = null, days = 7) {
    await this.init();
    const cutoff = Date.now() - days * 864e5;
    const mems = (agent ? this.memories.filter(m => m.agent === agent) : this.memories)
      .filter(m => new Date(m.created_at).getTime() > cutoff);
    const byDate = {};
    for (const m of mems) {
      const day = m.created_at.split('T')[0];
      if (!byDate[day]) byDate[day] = [];
      byDate[day].push({ id: m.id, memory: m.memory, agent: m.agent, category: m.category, importance: m.importance, links: (m.links || []).length });
    }
    return { days, agent, dates: byDate, total: mems.length };
  }

  // ── Health report ────────────────────────────────────────

  /** Aggregate stats plus strength distribution, orphan/archive counts, and age. */
  async healthReport() {
    await this.init();
    const stats = await this.stats();
    const dist = { strong: 0, healthy: 0, weakening: 0, critical: 0, dead: 0 };
    const strengths = [];
    for (const mem of this.memories) {
      const { strength } = this.calcStrength(mem);
      strengths.push(strength);
      if (strength >= 0.7) dist.strong++;
      else if (strength >= 0.3) dist.healthy++;
      else if (strength >= 0.15) dist.weakening++;
      else if (strength >= 0.05) dist.critical++;
      else dist.dead++;
    }
    const orphans = this.memories.filter(m => (m.links || []).length === 0).length;
    const archivedCount = (await this.storage.loadArchive()).length;
    const ages = this.memories.map(m => (Date.now() - new Date(m.created_at).getTime()) / 864e5);
    const mean = (xs) => xs.reduce((a, b) => a + b, 0) / xs.length;
    return {
      ...stats,
      avgStrength: strengths.length ? mean(strengths).toFixed(3) : '0',
      distribution: dist,
      orphans,
      archivedCount,
      avgAgeDays: ages.length ? mean(ages).toFixed(1) : '0',
      maxAgeDays: ages.length ? Math.max(...ages).toFixed(1) : '0'
    };
  }

  // ── Conflict resolution & evolution ──────────────────────

  /**
   * Ask the extraction provider whether `newText` contradicts or updates
   * any of the 10 most similar existing memories (cosine > 0.6).
   * Without an extraction provider everything is treated as novel.
   */
  async detectConflicts(agent, newText) {
    await this.init();
    if (!this.extraction) return { conflicts: [], updates: [], novel: true };
    const [newEmb] = await this.embedding.embed(newText);
    const candidates = this.memories
      .filter(m => m.embedding)
      .map(m => ({ ...m, sim: cosineSimilarity(newEmb, m.embedding) }))
      .filter(m => m.sim > 0.6)
      .sort((a, b) => b.sim - a.sim)
      .slice(0, 10);
    if (!candidates.length) return { conflicts: [], updates: [], novel: true };

    const existing = candidates.map((c, i) => ({ index: i, id: c.id, agent: c.agent, memory: c.memory }));
    const result = await this.extraction.detectConflicts(newText, existing);

    // Re-attach full memory details to the provider's index-based answers.
    const enrich = (entry) => ({
      ...entry,
      memory: candidates[entry.index]?.memory,
      memoryId: candidates[entry.index]?.id,
      agent: candidates[entry.index]?.agent,
      similarity: candidates[entry.index]?.sim
    });
    result.conflicts = (result.conflicts || []).map(enrich);
    result.updates = (result.updates || []).map(enrich);
    return result;
  }

  /**
   * Store with conflict resolution: archive contradicted memories, rewrite
   * updated ones in place, otherwise store as new via addWithLinking.
   *
   * Fixes vs. the original: ALL updates are applied (the original
   * `return`ed out of the loop after the first one, dropping the rest),
   * importance falls back to 0.5 before Math.max (avoids NaN when the old
   * record has no importance), and the archive is loaded/saved once per
   * call instead of once per conflict.
   */
  async evolve(agent, memory, category, importance, tags) {
    const conflicts = await this.detectConflicts(agent, memory);
    const actions = [];

    // Archive every superseded (contradicted) memory in one batch.
    const superseded = (conflicts.conflicts || []).filter(c => c.memoryId);
    if (superseded.length) {
      const byId = this._index();
      const archive = await this.storage.loadArchive();
      const removeIds = new Set();
      for (const conflict of superseded) {
        const old = byId.get(conflict.memoryId);
        if (!old) continue;
        old.archived_at = new Date().toISOString();
        old.archived_reason = `Superseded: ${conflict.reason}`;
        archive.push({ ...old, embedding: undefined });
        removeIds.add(conflict.memoryId);
        actions.push({ type: 'archived', id: conflict.memoryId, reason: conflict.reason, old: old.memory });
      }
      if (removeIds.size) {
        await this.storage.saveArchive(archive);
        this.memories = this.memories.filter(m => !removeIds.has(m.id));
      }
    }

    // Apply ALL in-place updates, then persist once.
    let evolved = false;
    for (const update of (conflicts.updates || [])) {
      if (!update.memoryId) continue;
      const existing = this.memories.find(m => m.id === update.memoryId);
      if (!existing) continue;
      const oldContent = existing.memory;
      existing.memory = memory;
      existing.updated_at = new Date().toISOString();
      existing.importance = Math.max(existing.importance ?? 0.5, importance);
      const [newEmb] = await this.embedding.embed(memory);
      existing.embedding = newEmb;
      existing.evolution = existing.evolution || [];
      existing.evolution.push({ from: oldContent, to: memory, reason: update.reason, at: new Date().toISOString() });
      actions.push({ type: 'updated', id: update.memoryId, reason: update.reason, old: oldContent, new: memory });
      evolved = true;
    }
    if (evolved) {
      await this.save();
      return { actions, stored: false, evolved: true };
    }

    const result = await this.addWithLinking(agent, memory, category, importance, tags);
    actions.push({ type: 'stored', id: result.id, links: result.links });
    return { actions, stored: true, id: result.id, links: result.links, conflicts: conflicts.conflicts?.length || 0 };
  }

  // ── Context generation ───────────────────────────────────

  /**
   * Build an LLM-ready markdown context block: direct search hits plus up
   * to 3 linked neighbors each, deduped, scored, grouped by category.
   * Search is deliberately cross-agent; other agents' memories are tagged.
   */
  async generateContext(agent, query, maxMemories = 15) {
    await this.init();
    const byId = this._index();
    const results = await this.search(query, null, 8);
    const seen = new Set();
    const contextMems = [];

    for (const r of results) {
      if (seen.has(r.id)) continue;
      seen.add(r.id);
      contextMems.push({ ...r, source: 'direct' });
      const mem = byId.get(r.id);
      for (const link of (mem?.links || []).slice(0, 3)) {
        if (seen.has(link.id)) continue;
        seen.add(link.id);
        const linked = byId.get(link.id);
        if (linked) contextMems.push({ id: linked.id, memory: linked.memory, agent: linked.agent, category: linked.category, importance: linked.importance, score: link.similarity * r.score, source: 'linked' });
      }
      if (contextMems.length >= maxMemories) break;
    }

    contextMems.sort((a, b) => (b.score || 0) - (a.score || 0));
    const top = contextMems.slice(0, maxMemories);
    if (!top.length) return { query, context: '(no relevant memories found)', count: 0 };

    const lines = [`## Relevant Memory Context (query: "${query}")\n`];
    const byCategory = {};
    for (const m of top) {
      const cat = m.category || 'fact';
      if (!byCategory[cat]) byCategory[cat] = [];
      byCategory[cat].push(m);
    }
    for (const cat of ['decision', 'finding', 'preference', 'insight', 'fact', 'event', 'task']) {
      if (!byCategory[cat]) continue;
      lines.push(`### ${cat.charAt(0).toUpperCase() + cat.slice(1)}s`);
      for (const m of byCategory[cat]) {
        const tag = m.agent !== agent ? ` (${m.agent})` : '';
        lines.push(`- ${m.memory}${tag}`);
      }
      lines.push('');
    }
    return { query, context: lines.join('\n'), count: top.length, memories: top };
  }

  // ── Stats ────────────────────────────────────────────────

  /** Counts by agent and category, link totals, and cross-agent link count. */
  async stats(agent = null) {
    await this.init();
    const byId = this._index();
    const mems = agent ? this.memories.filter(m => m.agent === agent) : this.memories;
    const byAgent = {}, byCategory = {};
    let totalLinks = 0, crossAgentLinks = 0;
    for (const m of mems) {
      byAgent[m.agent] = (byAgent[m.agent] || 0) + 1;
      byCategory[m.category] = (byCategory[m.category] || 0) + 1;
      totalLinks += (m.links || []).length;
      for (const link of (m.links || [])) {
        const target = byId.get(link.id);
        if (target && target.agent !== m.agent) crossAgentLinks++;
      }
    }
    return { total: mems.length, byAgent, byCategory, totalLinks, crossAgentLinks, avgLinksPerMemory: mems.length ? (totalLinks / mems.length).toFixed(1) : '0' };
  }
}
@@ -0,0 +1,72 @@
1
+ /**
2
+ * Embedding Provider Interface
3
+ *
4
+ * Implementations must provide:
5
+ * embed(texts: string[]): Promise<number[][]> — returns array of embedding vectors
6
+ */
7
+
8
+ /**
9
+ * @typedef {Object} EmbeddingProvider
10
+ * @property {(texts: string[]) => Promise<number[][]>} embed
11
+ */
12
+
13
/**
 * NVIDIA NIM embedding provider (baai/bge-m3)
 */
export class NvidiaEmbeddingProvider {
  /**
   * @param {Object} opts
   * @param {string} opts.apiKey — NVIDIA_API_KEY
   * @param {string} [opts.model] — embedding model (default: baai/bge-m3)
   * @param {string} [opts.baseUrl] — API base URL
   * @param {number} [opts.maxRetries] — max retries on HTTP 429 (default: 5)
   */
  constructor({ apiKey, model = 'baai/bge-m3', baseUrl = 'https://integrate.api.nvidia.com/v1/embeddings', maxRetries = 5 } = {}) {
    if (!apiKey) throw new Error('apiKey is required');
    this.apiKey = apiKey;
    this.model = model;
    this.baseUrl = baseUrl;
    this.maxRetries = maxRetries;
  }

  /**
   * Embed one or more texts.
   *
   * On HTTP 429 this waits 2s and retries, but only up to `maxRetries`
   * times — the original recursed unboundedly, which could spin forever
   * against a persistently rate-limited endpoint.
   *
   * @param {string|string[]} texts
   * @returns {Promise<number[][]>} one vector per input text
   * @throws {Error} on any non-429 HTTP error, or after retries are exhausted
   */
  async embed(texts) {
    const input = Array.isArray(texts) ? texts : [texts];
    for (let attempt = 0; ; attempt++) {
      const res = await fetch(this.baseUrl, {
        method: 'POST',
        headers: { 'Authorization': `Bearer ${this.apiKey}`, 'Content-Type': 'application/json' },
        body: JSON.stringify({ model: this.model, input })
      });
      if (res.ok) {
        const data = await res.json();
        return data.data.map(d => d.embedding);
      }
      if (res.status === 429 && attempt < this.maxRetries) {
        await new Promise(r => setTimeout(r, 2000));
        continue;
      }
      throw new Error(`Embedding failed: ${res.status} ${await res.text()}`);
    }
  }
}
49
+
50
/**
 * OpenAI-compatible embedding provider
 */
export class OpenAIEmbeddingProvider {
  /**
   * @param {Object} opts
   * @param {string} opts.apiKey — bearer token
   * @param {string} [opts.model] — embedding model (default: text-embedding-3-small)
   * @param {string} [opts.baseUrl] — embeddings endpoint
   */
  constructor({ apiKey, model = 'text-embedding-3-small', baseUrl = 'https://api.openai.com/v1/embeddings' } = {}) {
    if (!apiKey) throw new Error('apiKey is required');
    this.apiKey = apiKey;
    this.model = model;
    this.baseUrl = baseUrl;
  }

  /**
   * Embed one or more texts via the /embeddings endpoint.
   * @param {string|string[]} texts
   * @returns {Promise<number[][]>} one vector per input text
   * @throws {Error} on any non-2xx HTTP response
   */
  async embed(texts) {
    const input = Array.isArray(texts) ? texts : [texts];
    const response = await fetch(this.baseUrl, {
      method: 'POST',
      headers: { 'Authorization': `Bearer ${this.apiKey}`, 'Content-Type': 'application/json' },
      body: JSON.stringify({ model: this.model, input })
    });
    if (!response.ok) {
      throw new Error(`Embedding failed: ${response.status} ${await response.text()}`);
    }
    const { data } = await response.json();
    return data.map(({ embedding }) => embedding);
  }
}
@@ -0,0 +1,119 @@
1
+ /**
2
+ * Extraction Provider Interface
3
+ *
4
+ * Implementations must provide:
5
+ * extractFacts(text: string): Promise<Fact[]>
6
+ * detectConflicts(newText: string, existingFacts: string[]): Promise<ConflictResult>
7
+ *
8
+ * Fact: { fact: string, category: string, importance: number, tags: string[] }
9
+ * ConflictResult: { conflicts: [...], updates: [...], novel: boolean }
10
+ */
11
+
12
/**
 * OpenAI-compatible LLM extraction provider
 * Works with any OpenAI chat completions API (OpenAI, Anthropic via proxy, local LLMs)
 */
export class LLMExtractionProvider {
  /**
   * @param {Object} opts
   * @param {string} opts.baseUrl — chat completions endpoint
   * @param {string} [opts.apiKey] — bearer token
   * @param {string} [opts.model] — model to use
   */
  constructor({ baseUrl, apiKey, model = 'anthropic/claude-haiku-4-5' } = {}) {
    if (!baseUrl) throw new Error('baseUrl is required');
    this.baseUrl = baseUrl;
    this.apiKey = apiKey;
    this.model = model;
  }

  /**
   * Send one user message and return the assistant text with any
   * markdown code fences stripped (models often wrap JSON in ```json).
   * @throws {Error} on any non-2xx HTTP response
   */
  async _chat(prompt) {
    const headers = { 'Content-Type': 'application/json' };
    if (this.apiKey) headers['Authorization'] = `Bearer ${this.apiKey}`;
    const res = await fetch(this.baseUrl, {
      method: 'POST',
      headers,
      body: JSON.stringify({
        model: this.model,
        messages: [{ role: 'user', content: prompt }],
        max_tokens: 2000,
        temperature: 0.1
      })
    });
    if (!res.ok) throw new Error(`LLM request failed: ${res.status}`);
    const data = await res.json();
    const content = data.choices?.[0]?.message?.content || '';
    return content.replace(/```json\n?/g, '').replace(/```\n?/g, '').trim();
  }

  /**
   * Extract atomic facts from free text.
   * Best-effort: on any LLM/parse failure — including the LLM returning
   * valid JSON that is not an array (the original passed that through,
   * breaking callers that expect Fact[]) — falls back to wrapping the raw
   * text as a single medium-importance fact.
   * @param {string} text
   * @returns {Promise<{fact: string, category: string, importance: number, tags: string[]}[]>}
   */
  async extractFacts(text) {
    const prompt = `You are a precise fact extractor. Extract discrete, atomic facts from the following text. Each fact should be self-contained and include specific details (names, numbers, dates, decisions, preferences).

Output as a JSON array of objects with fields:
- "fact": the extracted fact (string)
- "category": one of "decision", "finding", "fact", "insight", "task", "event", "preference"
- "importance": 0.0 to 1.0 (how important for long-term memory)
- "tags": array of relevant keywords

Text to extract from:
${text}

Respond ONLY with the JSON array, no markdown formatting.`;

    try {
      const parsed = JSON.parse(await this._chat(prompt));
      if (!Array.isArray(parsed)) throw new Error('expected a JSON array of facts');
      return parsed;
    } catch (e) {
      return [{ fact: text, category: 'fact', importance: 0.5, tags: [] }];
    }
  }

  /**
   * Classify a new fact against existing ones as conflicts / updates / novel.
   * Best-effort: any LLM/parse failure yields "novel" with the error attached.
   * @param {string} newText
   * @param {{ index: number, id: string, agent: string, memory: string }[]} existing
   */
  async detectConflicts(newText, existing) {
    if (!existing.length) return { conflicts: [], updates: [], novel: true };

    const existingFacts = existing.map((c, i) =>
      `[${i}] (id:${c.id}, agent:${c.agent}) ${c.memory}`
    ).join('\n');

    const prompt = `You are a fact-checker. Compare the NEW FACT against EXISTING FACTS and identify:
1. CONFLICTS: The new fact directly contradicts an existing fact
2. UPDATES: The new fact is a newer version of an existing fact (same topic, updated info)
3. NOVEL: The new fact adds genuinely new information

NEW FACT: ${newText}

EXISTING FACTS:
${existingFacts}

Respond ONLY with a JSON object:
{
  "conflicts": [{"index": <number>, "reason": "<why it contradicts>"}],
  "updates": [{"index": <number>, "reason": "<what changed>"}],
  "novel": <true|false>
}`;

    try {
      const json = await this._chat(prompt);
      return JSON.parse(json);
    } catch (e) {
      return { conflicts: [], updates: [], novel: true, error: e.message };
    }
  }
}
107
+
108
/**
 * No-op extraction (just wraps text as a single fact, no LLM needed)
 */
export class PassthroughExtractionProvider {
  /** Wrap the raw text as one medium-importance generic fact. */
  async extractFacts(text) {
    const fact = { fact: text, category: 'fact', importance: 0.5, tags: [] };
    return [fact];
  }

  /** Without an LLM every new fact is treated as novel. */
  async detectConflicts() {
    return { conflicts: [], updates: [], novel: true };
  }
}
@@ -0,0 +1,103 @@
1
+ /**
2
+ * Storage Provider Interface
3
+ *
4
+ * Implementations must provide:
5
+ * load(): Promise<Memory[]>
6
+ * save(memories: Memory[]): Promise<void>
7
+ * loadArchive(): Promise<Memory[]>
8
+ * saveArchive(memories: Memory[]): Promise<void>
9
+ */
10
+
11
+ import { readFile, writeFile, mkdir } from 'fs/promises';
12
+ import { join } from 'path';
13
+ import { existsSync } from 'fs';
14
+
15
/**
 * Local JSON file storage (default)
 *
 * Fixes vs. the original: save()/saveArchive() now create the directory on
 * demand (previously calling save before load on a fresh path threw ENOENT),
 * and loadArchive() strips a UTF-8 BOM just like load() did.
 */
export class JsonFileStorage {
  /**
   * @param {string} storePath — directory for graph.json / archived.json
   */
  constructor(storePath) {
    this.storePath = storePath;
  }

  /** Read + parse a JSON file, stripping a UTF-8 BOM; [] when the file is missing. */
  async _readJson(name) {
    const file = join(this.storePath, name);
    if (!existsSync(file)) return [];
    let raw = await readFile(file, 'utf8');
    if (raw.charCodeAt(0) === 0xFEFF) raw = raw.slice(1);
    return JSON.parse(raw);
  }

  /** Serialize to pretty-printed JSON, creating the directory on demand. */
  async _writeJson(name, data) {
    await mkdir(this.storePath, { recursive: true });
    await writeFile(join(this.storePath, name), JSON.stringify(data, null, 2), 'utf8');
  }

  async load() {
    await mkdir(this.storePath, { recursive: true });
    return this._readJson('graph.json');
  }

  async save(memories) {
    await this._writeJson('graph.json', memories);
  }

  async loadArchive() {
    return this._readJson('archived.json');
  }

  async saveArchive(memories) {
    await this._writeJson('archived.json', memories);
  }
}
51
+
52
/**
 * Supabase REST storage
 */
export class SupabaseStorage {
  /**
   * @param {Object} opts
   * @param {string} opts.url — Supabase project URL
   * @param {string} opts.key — service_role or anon key
   * @param {string} [opts.table] — table name (default: memories)
   */
  constructor({ url, key, table = 'memories' } = {}) {
    if (!url || !key) throw new Error('url and key are required');
    this.url = url;
    this.key = key;
    this.table = table;
    this.headers = {
      'apikey': key,
      'Authorization': `Bearer ${key}`,
      'Content-Type': 'application/json'
    };
  }

  /** GET rows matching a PostgREST filter on the archived_at column. */
  async _select(filter) {
    const endpoint = `${this.url}/rest/v1/${this.table}?archived_at=${filter}&select=*`;
    return fetch(endpoint, { headers: this.headers });
  }

  /** Load all live (non-archived) memories. */
  async load() {
    const res = await this._select('is.null');
    if (!res.ok) throw new Error(`Supabase load failed: ${res.status}`);
    return res.json();
  }

  /** Upsert the full memory set (merge on duplicate keys). */
  async save(memories) {
    const res = await fetch(`${this.url}/rest/v1/${this.table}`, {
      method: 'POST',
      headers: { ...this.headers, 'Prefer': 'resolution=merge-duplicates' },
      body: JSON.stringify(memories)
    });
    if (!res.ok) throw new Error(`Supabase save failed: ${res.status}`);
  }

  /** Load archived memories; best-effort — [] on any HTTP error. */
  async loadArchive() {
    const res = await this._select('not.is.null');
    if (!res.ok) return [];
    return res.json();
  }

  /** Archived rows live in the same table, distinguished by archived_at. */
  async saveArchive(memories) {
    await this.save(memories);
  }
}