@icex-labs/openclaw-memory-engine 3.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.js ADDED
@@ -0,0 +1,626 @@
1
+ /**
2
+ * @icex-labs/openclaw-memory-engine v3.3.1
3
+ *
4
+ * MemGPT-style hierarchical memory plugin for OpenClaw.
5
+ *
6
+ * Tools (19):
7
+ * Core: core_memory_read, core_memory_replace, core_memory_append
8
+ * Archival: archival_insert, archival_search, archival_update, archival_delete, archival_stats
9
+ * Graph: graph_query, graph_add
10
+ * Episodes: episode_save, episode_recall
11
+ * Reflection: memory_reflect
12
+ * Maintenance: archival_deduplicate, memory_consolidate
13
+ * Backup: memory_export, memory_import
14
+ * Admin: memory_migrate, memory_dashboard
15
+ */
16
+
17
+ import { definePluginEntry } from "openclaw/plugin-sdk/plugin-entry";
18
+ import { existsSync } from "node:fs";
19
+
20
+ import { resolveWorkspace, getCoreSizeLimit, DEFAULT_TOP_K, MAX_TOP_K } from "./lib/paths.js";
21
+ import { readCore, writeCore, dotGet, dotSet, autoParse } from "./lib/core.js";
22
+ import { loadArchival, appendRecord, rewriteArchival, archivalPath } from "./lib/archival.js";
23
+ import { indexEmbedding, loadEmbeddingCache, saveEmbeddingCache } from "./lib/embedding.js";
24
+ import { hybridSearch } from "./lib/search.js";
25
+ import { consolidateText } from "./lib/consolidate.js";
26
+ import { findDuplicates, applyDedup } from "./lib/dedup.js";
27
+ import { exportMemory, importMemory } from "./lib/backup.js";
28
+ import { queryGraph, addTriple, extractTriples } from "./lib/graph.js";
29
+ import { saveEpisode, recallEpisodes, indexEpisodeEmbedding } from "./lib/episodes.js";
30
+ import { analyzePatterns, formatReflection } from "./lib/reflection.js";
31
+ import { migrateFromJsonl } from "./lib/store-sqlite.js";
32
+ import { generateDashboard } from "./lib/dashboard.js";
33
+
34
+ import { readFileSync } from "node:fs";
35
+
36
+ // ═══════════════════════════════════════════════════════════════════
37
+ // Helper: format search results
38
+ // ═══════════════════════════════════════════════════════════════════
39
+
40
/**
 * Render search hits as a numbered list, one line per hit:
 * "[n] (YYYY-MM-DD) [entity] content #tag1 #tag2".
 * Missing timestamps render as "?", missing entity/tags are omitted.
 */
function formatResults(results) {
  const lines = [];
  for (const [index, hit] of results.entries()) {
    const date = hit.ts?.slice(0, 10) || "?";
    const entityPrefix = hit.entity ? `[${hit.entity}] ` : "";
    const tagSuffix = hit.tags?.length ? ` #${hit.tags.join(" #")}` : "";
    lines.push(`[${index + 1}] (${date}) ${entityPrefix}${hit.content}${tagSuffix}`);
  }
  return lines.join("\n");
}
48
+
49
/** Wrap a plain string in the tool-result envelope expected by the SDK. */
function text(msg) {
  const part = { type: "text", text: msg };
  return { content: [part] };
}
52
+
53
/**
 * Extract agentId from session key.
 * Session key format: "agent:<agentId>:..." (e.g., "agent:wife:discord:channel:123")
 *
 * @param {string|null|undefined} sessionKey - raw session key, may be absent
 * @returns {string|null} the agent id, or null when missing/malformed
 */
function extractAgentId(sessionKey) {
  if (!sessionKey) return null;
  const parts = sessionKey.split(":");
  // Format: agent:<agentId>:...
  // Fix: an empty id segment ("agent:") previously returned "" — treat it as
  // missing so callers' null-checks behave consistently.
  if (parts[0] === "agent" && parts.length >= 2 && parts[1]) return parts[1];
  return null;
}
64
+
65
/**
 * Resolve workspace for the current tool invocation.
 * A per-call `agent` param (if present) overrides the factory-bound agent id.
 */
function ws(factoryAgentId, params) {
  const perCallAgent = params?.agent;
  return resolveWorkspace({ agentId: factoryAgentId }, perCallAgent);
}
69
+
70
/**
 * Wrap a tool definition into a factory that binds agentId from session context.
 * Usage: api.registerTool(withAgent((agentId) => ({ name: ..., execute: ... })))
 *
 * Resolution order: explicit ctx.agentId, then the id parsed from the session
 * key, then the workspace dir, then null.
 */
function withAgent(toolFn) {
  return (factoryCtx) => {
    let agentId = factoryCtx?.agentId;
    if (!agentId) agentId = extractAgentId(factoryCtx?.sessionKey);
    if (!agentId) agentId = factoryCtx?.workspaceDir || null;
    return toolFn(agentId);
  };
}
80
+
81
+ // ═══════════════════════════════════════════════════════════════════
82
+ // Plugin entry
83
+ // ═══════════════════════════════════════════════════════════════════
84
+
85
+ export default definePluginEntry({
86
+ id: "memory-engine",
87
+ name: "Memory Engine",
88
+ description:
89
+ "MemGPT-style hierarchical memory: core block, archival storage, hybrid search, dedup, consolidate, backup/restore",
90
+
91
+ register(api) {
92
+ // ═══════════════════════════════════════════════════════════════════
93
+ // All tools use factory pattern to extract agentId from ctx.sessionKey.
94
+ // This enables per-agent workspace resolution for multi-workspace setups.
95
+ // Factory ctx has: { sessionKey, workspaceDir, agentId, ... }
96
+ // ═══════════════════════════════════════════════════════════════════
97
+
98
+ // ─── core_memory_read ───
99
+ api.registerTool(withAgent((agentId) => ({
100
+ name: "core_memory_read",
101
+ description:
102
+ "Read the entire core memory block. Contains user identity, relationship, preferences, and current focus. Call at session start.",
103
+ parameters: { type: "object", properties: {}, additionalProperties: false },
104
+ async execute(_id, _params) {
105
+ return text(JSON.stringify(readCore(ws(agentId, _params)), null, 2));
106
+ },
107
+ })));
108
+
109
+ // ─── core_memory_replace ───
110
+ api.registerTool(withAgent((agentId) => ({
111
+ name: "core_memory_replace",
112
+ description:
113
+ "Atomically update a field in core memory using dot-path notation (e.g., 'user.location', 'current_focus'). Value is auto-parsed if it looks like JSON. Core memory must stay small (<3KB).",
114
+ parameters: {
115
+ type: "object",
116
+ properties: {
117
+ key: { type: "string", description: "Dot-path key (e.g., 'user.location', 'current_focus')" },
118
+ value: { description: "New value — string, array, or object. Auto-parsed from JSON strings." },
119
+ },
120
+ required: ["key", "value"],
121
+ additionalProperties: false,
122
+ },
123
+ async execute(_id, params) {
124
+ const wsp = ws(agentId, params);
125
+ const limit = getCoreSizeLimit(null);
126
+ const core = readCore(wsp);
127
+ const value = autoParse(params.value);
128
+ const old = dotSet(core, params.key, value);
129
+ const size = JSON.stringify(core, null, 2).length;
130
+ if (size > limit) {
131
+ dotSet(core, params.key, old);
132
+ return text(`ERROR: Would exceed ${limit}B limit (${size}B). Use archival_insert for details.`);
133
+ }
134
+ writeCore(wsp, core);
135
+ return text(`OK: ['${params.key}'] updated. Old: ${JSON.stringify(old)} → New: ${JSON.stringify(value)}`);
136
+ },
137
+ })));
138
+
139
+ // ─── core_memory_append ───
140
+ api.registerTool(withAgent((agentId) => ({
141
+ name: "core_memory_append",
142
+ description:
143
+ "Append an item to an array field in core memory (e.g., current_focus). Creates the array if needed.",
144
+ parameters: {
145
+ type: "object",
146
+ properties: {
147
+ key: { type: "string", description: "Dot-path to array field (e.g., 'current_focus')" },
148
+ item: { type: "string", description: "Item to append" },
149
+ },
150
+ required: ["key", "item"],
151
+ additionalProperties: false,
152
+ },
153
+ async execute(_id, params) {
154
+ const wsp = ws(agentId, params);
155
+ const limit = getCoreSizeLimit(null);
156
+ const core = readCore(wsp);
157
+ let arr = dotGet(core, params.key);
158
+ if (!Array.isArray(arr)) {
159
+ arr = arr != null ? [arr] : [];
160
+ dotSet(core, params.key, arr);
161
+ }
162
+ arr.push(params.item);
163
+ const size = JSON.stringify(core, null, 2).length;
164
+ if (size > limit) {
165
+ arr.pop();
166
+ return text(`ERROR: Would exceed ${limit}B limit. Remove an item first or use archival_insert.`);
167
+ }
168
+ writeCore(wsp, core);
169
+ return text(`OK: Appended "${params.item}" to ${params.key} (now ${arr.length} items)`);
170
+ },
171
+ })));
172
+
173
+ // ─── archival_insert ───
174
+ api.registerTool(withAgent((agentId) => ({
175
+ name: "archival_insert",
176
+ description:
177
+ "Store a memory/fact in archival storage. Tags with entity and tags. Auto-extracts knowledge graph triples. Set importance (1-10, default 5) to influence search ranking and forgetting.",
178
+ parameters: {
179
+ type: "object",
180
+ properties: {
181
+ content: { type: "string", description: "The fact to store (1-3 sentences, specific)" },
182
+ entity: { type: "string", description: "Primary entity (e.g., 'George', 'GX550')" },
183
+ tags: { type: "array", items: { type: "string" }, description: "Category tags" },
184
+ importance: { type: "number", description: "1-10, how important is this fact? (default: 5). High=permanent, Low=may be forgotten." },
185
+ },
186
+ required: ["content"],
187
+ additionalProperties: false,
188
+ },
189
+ async execute(_id, params) {
190
+ const wsp = ws(agentId, params);
191
+ const imp = Math.min(10, Math.max(1, params.importance ?? 5));
192
+ const record = appendRecord(wsp, {
193
+ content: params.content,
194
+ entity: params.entity || "",
195
+ tags: params.tags || [],
196
+ importance: imp,
197
+ });
198
+ indexEmbedding(wsp, record).catch(() => {});
199
+
200
+ // Auto-extract knowledge graph triples
201
+ const triples = extractTriples(params.content);
202
+ const graphResults = [];
203
+ for (const t of triples) {
204
+ const added = addTriple(wsp, t.s, t.r, t.o, record.id);
205
+ if (added) graphResults.push(`(${t.s} --${t.r}--> ${t.o})`);
206
+ }
207
+
208
+ let msg = `OK: Archived ${record.id} (importance=${imp}). "${record.content.slice(0, 80)}..."`;
209
+ if (graphResults.length > 0) {
210
+ msg += `\nGraph: extracted ${graphResults.length} relation(s): ${graphResults.join(", ")}`;
211
+ }
212
+ return text(msg);
213
+ },
214
+ })));
215
+
216
+ // ─── archival_search ───
217
+ api.registerTool(withAgent((agentId) => ({
218
+ name: "archival_search",
219
+ description:
220
+ "Hybrid search over archival memory: keyword + semantic similarity + recency + access decay. Use before answering factual questions.",
221
+ parameters: {
222
+ type: "object",
223
+ properties: {
224
+ query: { type: "string", description: "Search query with specific keywords" },
225
+ top_k: { type: "number", description: `Results to return (default ${DEFAULT_TOP_K}, max ${MAX_TOP_K})` },
226
+ },
227
+ required: ["query"],
228
+ additionalProperties: false,
229
+ },
230
+ async execute(_id, params) {
231
+ const wsp = ws(agentId, params);
232
+ const topK = Math.min(params.top_k || DEFAULT_TOP_K, MAX_TOP_K);
233
+ const results = await hybridSearch(wsp, params.query, topK);
234
+ if (results.length === 0) return text(`No archival memories found for: "${params.query}"`);
235
+ return text(`Found ${results.length} results:\n${formatResults(results)}`);
236
+ },
237
+ })));
238
+
239
+ // ─── archival_update ───
240
+ api.registerTool(withAgent((agentId) => ({
241
+ name: "archival_update",
242
+ description:
243
+ "Update an existing archival record by ID. Use to correct wrong facts.",
244
+ parameters: {
245
+ type: "object",
246
+ properties: {
247
+ id: { type: "string", description: "Record ID" },
248
+ content: { type: "string", description: "New content" },
249
+ entity: { type: "string", description: "New entity (optional)" },
250
+ tags: { type: "array", items: { type: "string" }, description: "New tags (optional)" },
251
+ },
252
+ required: ["id", "content"],
253
+ additionalProperties: false,
254
+ },
255
+ async execute(_id, params) {
256
+ const wsp = ws(agentId, params);
257
+ const records = loadArchival(wsp);
258
+ const idx = records.findIndex((r) => r.id === params.id);
259
+ if (idx === -1) return text(`ERROR: Record ${params.id} not found.`);
260
+ const old = records[idx].content;
261
+ records[idx].content = params.content;
262
+ records[idx].updated_at = new Date().toISOString();
263
+ if (params.entity !== undefined) records[idx].entity = params.entity;
264
+ if (params.tags !== undefined) records[idx].tags = params.tags;
265
+ rewriteArchival(wsp, records);
266
+ const embCache = loadEmbeddingCache(wsp);
267
+ delete embCache[params.id];
268
+ saveEmbeddingCache(wsp);
269
+ indexEmbedding(wsp, records[idx]).catch(() => {});
270
+ return text(`OK: Updated ${params.id}. Old: "${old.slice(0, 60)}..." → New: "${params.content.slice(0, 60)}..."`);
271
+ },
272
+ })));
273
+
274
+ // ─── archival_delete ───
275
+ api.registerTool(withAgent((agentId) => ({
276
+ name: "archival_delete",
277
+ description: "Delete an archival record by ID.",
278
+ parameters: {
279
+ type: "object",
280
+ properties: { id: { type: "string", description: "Record ID to delete" } },
281
+ required: ["id"],
282
+ additionalProperties: false,
283
+ },
284
+ async execute(_id, params) {
285
+ const wsp = ws(agentId, params);
286
+ const records = loadArchival(wsp);
287
+ const idx = records.findIndex((r) => r.id === params.id);
288
+ if (idx === -1) return text(`ERROR: Record ${params.id} not found.`);
289
+ const removed = records.splice(idx, 1)[0];
290
+ rewriteArchival(wsp, records);
291
+ const embCache = loadEmbeddingCache(wsp);
292
+ delete embCache[params.id];
293
+ saveEmbeddingCache(wsp);
294
+ return text(`OK: Deleted ${params.id}. Was: "${removed.content.slice(0, 80)}..."`);
295
+ },
296
+ })));
297
+
298
+ // ─── archival_stats ───
299
+ api.registerTool(withAgent((agentId) => ({
300
+ name: "archival_stats",
301
+ description: "Show archival memory statistics.",
302
+ parameters: { type: "object", properties: {}, additionalProperties: false },
303
+ async execute(_id, params) {
304
+ const wsp = ws(agentId, params);
305
+ const records = loadArchival(wsp);
306
+ const embCache = loadEmbeddingCache(wsp);
307
+ const entityCounts = {};
308
+ const tagCounts = {};
309
+ let recentCount = 0;
310
+ const oneWeekAgo = Date.now() - 7 * 86400000;
311
+ for (const r of records) {
312
+ entityCounts[r.entity || "(none)"] = (entityCounts[r.entity || "(none)"] || 0) + 1;
313
+ for (const t of r.tags || []) tagCounts[t] = (tagCounts[t] || 0) + 1;
314
+ if (r.ts && new Date(r.ts).getTime() > oneWeekAgo) recentCount++;
315
+ }
316
+ const topE = Object.entries(entityCounts).sort((a, b) => b[1] - a[1]).slice(0, 10).map(([e, c]) => ` ${e}: ${c}`).join("\n");
317
+ const topT = Object.entries(tagCounts).sort((a, b) => b[1] - a[1]).slice(0, 10).map(([t, c]) => ` ${t}: ${c}`).join("\n");
318
+ const p = archivalPath(wsp);
319
+ const fileSize = existsSync(p) ? readFileSync(p).length : 0;
320
+ return text([
321
+ `Total records: ${records.length}`,
322
+ `Embedded: ${Object.keys(embCache).length}/${records.length}`,
323
+ `Recent (7d): ${recentCount}`,
324
+ `File size: ${(fileSize / 1024).toFixed(1)}KB`,
325
+ `\nTop entities:\n${topE || " (none)"}`,
326
+ `\nTop tags:\n${topT || " (none)"}`,
327
+ ].join("\n"));
328
+ },
329
+ })));
330
+
331
+ // ─── archival_deduplicate ───
332
+ api.registerTool(withAgent((agentId) => ({
333
+ name: "archival_deduplicate",
334
+ description:
335
+ "Scan for near-duplicate records using embedding similarity. Preview by default; pass apply=true to remove.",
336
+ parameters: {
337
+ type: "object",
338
+ properties: {
339
+ apply: { type: "boolean", description: "If true, delete duplicates. Default: preview only." },
340
+ },
341
+ additionalProperties: false,
342
+ },
343
+ async execute(_id, params) {
344
+ const wsp = ws(agentId, params);
345
+ const dupes = await findDuplicates(wsp);
346
+ if (dupes.length === 0) return text("No duplicates found. Archival memory is clean.");
347
+ const preview = dupes
348
+ .map((d, i) => `[${i + 1}] sim=${d.similarity}\n KEEP: ${d.keep.content.slice(0, 80)}\n DROP: ${d.drop.content.slice(0, 80)}`)
349
+ .join("\n\n");
350
+ if (params.apply) {
351
+ const { removed, remaining } = applyDedup(wsp, dupes);
352
+ return text(`Removed ${removed} duplicates (${remaining} remaining):\n\n${preview}`);
353
+ }
354
+ return text(`Found ${dupes.length} potential duplicates (preview, call with apply=true to remove):\n\n${preview}`);
355
+ },
356
+ })));
357
+
358
+ // ─── memory_consolidate ───
359
+ api.registerTool(withAgent((agentId) => ({
360
+ name: "memory_consolidate",
361
+ description:
362
+ "Extract structured facts from text (conversation summary, daily log). Splits by sentence, infers entity, deduplicates against existing archival.",
363
+ parameters: {
364
+ type: "object",
365
+ properties: {
366
+ text: { type: "string", description: "Text to extract facts from" },
367
+ default_entity: { type: "string", description: "Default entity if not inferred" },
368
+ default_tags: { type: "array", items: { type: "string" }, description: "Default tags" },
369
+ },
370
+ required: ["text"],
371
+ additionalProperties: false,
372
+ },
373
+ async execute(_id, params) {
374
+ const wsp = ws(agentId, params);
375
+ const result = await consolidateText(
376
+ ws, params.text, params.default_entity || "", params.default_tags || [],
377
+ );
378
+ if (result.total === 0) return text("No extractable facts found in the provided text.");
379
+ const lines = [
380
+ `Extracted ${result.total} candidates, inserted ${result.inserted.length}, skipped ${result.skipped.length} (duplicate).`,
381
+ ];
382
+ if (result.inserted.length > 0) lines.push(`Inserted IDs: ${result.inserted.join(", ")}`);
383
+ if (result.skipped.length > 0) lines.push(`Skipped: ${result.skipped.map((s) => `"${s}..."`).join(", ")}`);
384
+ return text(lines.join("\n"));
385
+ },
386
+ })));
387
+
388
+ // ─── graph_query ───
389
+ api.registerTool(withAgent((agentId) => ({
390
+ name: "graph_query",
391
+ description:
392
+ "Query the knowledge graph from a starting entity. Returns connected nodes via relations. Use to answer relational questions like 'who is George's doctor' or 'what treats his condition'.",
393
+ parameters: {
394
+ type: "object",
395
+ properties: {
396
+ entity: { type: "string", description: "Starting entity to query from (e.g., 'George', '荨麻疹')" },
397
+ relation: { type: "string", description: "Optional: filter by relation type (e.g., 'has_doctor', 'treated_by')" },
398
+ depth: { type: "number", description: "Traversal depth (default: 2, max: 4)" },
399
+ },
400
+ required: ["entity"],
401
+ additionalProperties: false,
402
+ },
403
+ async execute(_id, params) {
404
+ const wsp = ws(agentId, params);
405
+ const depth = Math.min(params.depth || 2, 4);
406
+ const results = queryGraph(wsp, params.entity, params.relation || null, depth);
407
+ if (results.length === 0) {
408
+ return text(`No graph connections found for entity: "${params.entity}"`);
409
+ }
410
+ const fmt = results.map((r, i) =>
411
+ `[${i + 1}] ${r.path.join(" ")} → ${r.node} (${r.triple.r})`,
412
+ ).join("\n");
413
+ return text(`Found ${results.length} connections from "${params.entity}":\n${fmt}`);
414
+ },
415
+ })));
416
+
417
+ // ─── graph_add ───
418
+ api.registerTool(withAgent((agentId) => ({
419
+ name: "graph_add",
420
+ description:
421
+ "Manually add a relation to the knowledge graph. Use when auto-extraction missed a relation, or to add relations you inferred from conversation.",
422
+ parameters: {
423
+ type: "object",
424
+ properties: {
425
+ subject: { type: "string", description: "Subject entity (e.g., 'George')" },
426
+ relation: { type: "string", description: "Relation type (e.g., 'has_doctor', 'owns', 'lives_in')" },
427
+ object: { type: "string", description: "Object entity (e.g., 'Dr. Smith', 'New York')" },
428
+ },
429
+ required: ["subject", "relation", "object"],
430
+ additionalProperties: false,
431
+ },
432
+ async execute(_id, params) {
433
+ const wsp = ws(agentId, params);
434
+ const triple = addTriple(wsp, params.subject, params.relation, params.object);
435
+ if (!triple) {
436
+ return text(`Relation already exists: (${params.subject} --${params.relation}--> ${params.object})`);
437
+ }
438
+ return text(`OK: Added ${triple.id}: (${params.subject} --${params.relation}--> ${params.object})`);
439
+ },
440
+ })));
441
+
442
+ // ─── episode_save ───
443
+ api.registerTool(withAgent((agentId) => ({
444
+ name: "episode_save",
445
+ description:
446
+ "Save a conversation episode (summary of what was discussed, decisions made, mood). Call at end of meaningful conversations. Enables 'what did we discuss last time about X?' queries.",
447
+ parameters: {
448
+ type: "object",
449
+ properties: {
450
+ summary: { type: "string", description: "1-3 sentence summary of the conversation" },
451
+ decisions: { type: "array", items: { type: "string" }, description: "Decisions or action items from the conversation" },
452
+ mood: { type: "string", description: "Emotional tone (e.g., 'relaxed', 'stressed', 'excited', 'serious')" },
453
+ topics: { type: "array", items: { type: "string" }, description: "Topic tags (e.g., ['vehicles', 'finance'])" },
454
+ participants: { type: "array", items: { type: "string" }, description: "Who was in the conversation" },
455
+ },
456
+ required: ["summary"],
457
+ additionalProperties: false,
458
+ },
459
+ async execute(_id, params) {
460
+ const wsp = ws(agentId, params);
461
+ const ep = saveEpisode(wsp, {
462
+ summary: params.summary,
463
+ decisions: params.decisions || [],
464
+ mood: params.mood || "",
465
+ topics: params.topics || [],
466
+ participants: params.participants || [],
467
+ });
468
+ indexEpisodeEmbedding(wsp, ep).catch(() => {});
469
+ return text(`OK: Episode saved ${ep.id}. "${ep.summary.slice(0, 100)}..."\n Decisions: ${ep.decisions.length}, Topics: ${ep.topics.join(", ") || "(none)"}, Mood: ${ep.mood || "(none)"}`);
470
+ },
471
+ })));
472
+
473
+ // ─── episode_recall ───
474
+ api.registerTool(withAgent((agentId) => ({
475
+ name: "episode_recall",
476
+ description:
477
+ "Search past conversation episodes by topic/keyword, or get the most recent N episodes. Use to recall 'what did we discuss about X last time'.",
478
+ parameters: {
479
+ type: "object",
480
+ properties: {
481
+ query: { type: "string", description: "Search query (topic, keyword). Omit to get recent episodes." },
482
+ last_n: { type: "number", description: "Number of episodes to return (default: 5)" },
483
+ },
484
+ additionalProperties: false,
485
+ },
486
+ async execute(_id, params) {
487
+ const wsp = ws(agentId, params);
488
+ const lastN = params.last_n || 5;
489
+ const results = await recallEpisodes(wsp, params.query || null, lastN);
490
+ if (results.length === 0) {
491
+ return text(params.query ? `No episodes found for: "${params.query}"` : "No episodes recorded yet.");
492
+ }
493
+ const fmt = results.map((ep, i) => {
494
+ const decisions = ep.decisions?.length ? `\n Decisions: ${ep.decisions.join("; ")}` : "";
495
+ const mood = ep.mood ? ` [${ep.mood}]` : "";
496
+ return `[${i + 1}] (${ep.ts?.slice(0, 10)}) ${ep.summary}${mood}${decisions}`;
497
+ }).join("\n\n");
498
+ return text(`${results.length} episode(s):\n\n${fmt}`);
499
+ },
500
+ })));
501
+
502
+ // ─── memory_reflect ───
503
+ api.registerTool(withAgent((agentId) => ({
504
+ name: "memory_reflect",
505
+ description: [
506
+ "Analyze recent memory for behavioral patterns, topic trends, mood shifts, and memory health.",
507
+ "Returns structured report with: top topics, time-of-day distribution, mood trend,",
508
+ "importance distribution, neglected entities, and forgetting candidates.",
509
+ "Use during heartbeats or when you want to understand what's been happening lately.",
510
+ "After reviewing the report, store meaningful observations via archival_insert with tags=['reflection'].",
511
+ ].join(" "),
512
+ parameters: {
513
+ type: "object",
514
+ properties: {
515
+ window_days: { type: "number", description: "Analysis window in days (default: 7, max: 30)" },
516
+ },
517
+ additionalProperties: false,
518
+ },
519
+ async execute(_id, params) {
520
+ const wsp = ws(agentId, params);
521
+ const window = Math.min(params.window_days || 7, 30);
522
+ const analysis = analyzePatterns(wsp, window);
523
+ const report = formatReflection(analysis);
524
+ return text(report);
525
+ },
526
+ })));
527
+
528
+ // ─── memory_export ───
529
+ api.registerTool(withAgent((agentId) => ({
530
+ name: "memory_export",
531
+ description:
532
+ "Export entire memory (core + archival + embeddings) to a JSON file for backup or migration.",
533
+ parameters: {
534
+ type: "object",
535
+ properties: {
536
+ output_path: { type: "string", description: "Output file path (default: memory/export-YYYY-MM-DD.json)" },
537
+ },
538
+ additionalProperties: false,
539
+ },
540
+ async execute(_id, params) {
541
+ const wsp = ws(agentId, params);
542
+ const { path, stats } = exportMemory(wsp, params.output_path);
543
+ const sizeKB = (readFileSync(path).length / 1024).toFixed(1);
544
+ return text(`OK: Exported to ${path} (${sizeKB}KB)\n Core: ${stats.core_size}B\n Archival: ${stats.archival_count} records\n Embeddings: ${stats.embedding_count}`);
545
+ },
546
+ })));
547
+
548
+ // ─── memory_import ───
549
+ api.registerTool(withAgent((agentId) => ({
550
+ name: "memory_import",
551
+ description:
552
+ "Import a memory export file. Modes: 'replace' (overwrite all) or 'merge' (add missing). Default: merge.",
553
+ parameters: {
554
+ type: "object",
555
+ properties: {
556
+ input_path: { type: "string", description: "Path to export JSON file" },
557
+ mode: { type: "string", description: "'replace' or 'merge' (default: merge)" },
558
+ },
559
+ required: ["input_path"],
560
+ additionalProperties: false,
561
+ },
562
+ async execute(_id, params) {
563
+ const wsp = ws(agentId, params);
564
+ try {
565
+ const result = importMemory(wsp, params.input_path, params.mode || "merge");
566
+ return text(`OK: ${result}`);
567
+ } catch (e) {
568
+ return text(`ERROR: ${e.message}`);
569
+ }
570
+ },
571
+ })));
572
+
573
+ // ─── memory_migrate ───
574
+ api.registerTool(withAgent((agentId) => ({
575
+ name: "memory_migrate",
576
+ description:
577
+ "Migrate memory from JSONL files to SQLite database. SQLite provides FTS5 full-text search and scales to 50K+ records. JSONL files are preserved as backup.",
578
+ parameters: {
579
+ type: "object",
580
+ properties: {},
581
+ additionalProperties: false,
582
+ },
583
+ async execute(_id, params) {
584
+ const wsp = ws(agentId, params);
585
+ try {
586
+ const result = migrateFromJsonl(wsp);
587
+ return text([
588
+ `OK: Migration complete.`,
589
+ ` Archival: ${result.archival} records`,
590
+ ` Graph: ${result.graph} triples`,
591
+ ` Episodes: ${result.episodes} episodes`,
592
+ ` Embeddings: ${result.embeddings} vectors`,
593
+ ``,
594
+ `SQLite database: memory/memory.sqlite`,
595
+ `JSONL files preserved as backup.`,
596
+ ].join("\n"));
597
+ } catch (e) {
598
+ return text(`ERROR: Migration failed: ${e.message}`);
599
+ }
600
+ },
601
+ })));
602
+
603
+ // ─── memory_dashboard ───
604
+ api.registerTool(withAgent((agentId) => ({
605
+ name: "memory_dashboard",
606
+ description:
607
+ "Generate a self-contained HTML dashboard for browsing memory: facts, graph, episodes, reflection, and search. Opens in any browser.",
608
+ parameters: {
609
+ type: "object",
610
+ properties: {
611
+ output_path: { type: "string", description: "Output HTML file path (default: memory/dashboard.html)" },
612
+ },
613
+ additionalProperties: false,
614
+ },
615
+ async execute(_id, params) {
616
+ const wsp = ws(agentId, params);
617
+ try {
618
+ const outPath = generateDashboard(wsp, params.output_path);
619
+ return text(`OK: Dashboard generated at ${outPath}\nOpen in browser: file://${outPath}`);
620
+ } catch (e) {
621
+ return text(`ERROR: Dashboard generation failed: ${e.message}`);
622
+ }
623
+ },
624
+ })));
625
+ },
626
+ });
@@ -0,0 +1,54 @@
1
+ /**
2
+ * Archival storage: unlimited append-only JSONL with in-memory index.
3
+ */
4
+
5
+ import { readFileSync, writeFileSync, appendFileSync, existsSync, mkdirSync } from "node:fs";
6
+ import { join } from "node:path";
7
+ import { archivalPath, DEFAULT_IMPORTANCE } from "./paths.js";
8
+
9
+ export { archivalPath };
10
+
11
+ /** In-memory cache keyed by workspace path. */
12
+ const cache = new Map();
13
+
14
/**
 * Load all archival records for a workspace.
 * Serves from the in-memory cache when available; otherwise reads the JSONL
 * file, silently skipping blank or unparseable lines, then primes the cache.
 */
export function loadArchival(ws) {
  const cached = cache.get(ws);
  if (cached?.loaded) return cached.records;
  const file = archivalPath(ws);
  let records = [];
  if (existsSync(file)) {
    records = [];
    for (const line of readFileSync(file, "utf-8").trim().split("\n")) {
      if (!line) continue;
      try {
        records.push(JSON.parse(line));
      } catch {
        // corrupt line — skip it, matching the append-only JSONL tolerance
      }
    }
  }
  cache.set(ws, { records, loaded: true });
  return records;
}
29
+
30
/**
 * Append a new record to the workspace's archival JSONL file and to the
 * in-memory cache (when loaded).
 *
 * @param {string} ws - workspace path
 * @param {object} entry - record fields; may override id/ts/etc. of the defaults
 * @returns {object} the full record as written
 */
export function appendRecord(ws, entry) {
  const record = {
    id: `arch-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
    ts: new Date().toISOString(),
    last_accessed: null,
    access_count: 0,
    ...entry,
  };
  // FIX: apply the importance default AFTER spreading `entry`. Previously the
  // spread ran last, so an entry carrying an explicit `importance: undefined`
  // key silently clobbered the default with undefined.
  record.importance = entry.importance ?? DEFAULT_IMPORTANCE;
  mkdirSync(join(ws, "memory"), { recursive: true });
  appendFileSync(archivalPath(ws), JSON.stringify(record) + "\n", "utf-8");
  if (cache.has(ws) && cache.get(ws).loaded) {
    cache.get(ws).records.push(record);
  }
  return record;
}
+ }
46
+
47
+ export function rewriteArchival(ws, records) {
48
+ writeFileSync(
49
+ archivalPath(ws),
50
+ records.map((r) => JSON.stringify(r)).join("\n") + "\n",
51
+ "utf-8",
52
+ );
53
+ cache.set(ws, { records: [...records], loaded: true });
54
+ }