@icex-labs/openclaw-memory-engine 3.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +254 -0
- package/extras/auto-consolidation-crons.json +37 -0
- package/extras/memory-maintenance.sh +176 -0
- package/index.js +626 -0
- package/lib/archival.js +54 -0
- package/lib/backup.js +99 -0
- package/lib/consolidate.js +102 -0
- package/lib/core.js +76 -0
- package/lib/dashboard.js +235 -0
- package/lib/dedup.js +68 -0
- package/lib/embedding.js +70 -0
- package/lib/episodes.js +133 -0
- package/lib/graph.js +148 -0
- package/lib/paths.js +80 -0
- package/lib/reflection.js +188 -0
- package/lib/search.js +90 -0
- package/lib/store-sqlite.js +422 -0
- package/openclaw.plugin.json +23 -0
- package/package.json +40 -0
- package/setup.sh +368 -0
|
@@ -0,0 +1,422 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SQLite storage backend with FTS5 full-text search.
|
|
3
|
+
*
|
|
4
|
+
* Schema:
|
|
5
|
+
* archival: id, ts, content, entity, tags (JSON), importance, last_accessed, access_count, source, updated_at
|
|
6
|
+
* archival_fts: FTS5 virtual table on (content, entity, tags_text)
|
|
7
|
+
* graph: id, subject, relation, object, ts, source
|
|
8
|
+
* episodes: id, ts, summary, decisions (JSON), mood, topics (JSON), participants (JSON), duration_minutes
|
|
9
|
+
* episodes_fts: FTS5 on (summary, decisions_text, topics_text)
|
|
10
|
+
* embeddings: record_id, vector (BLOB)
|
|
11
|
+
* meta: key, value
|
|
12
|
+
*/
|
|
13
|
+
|
|
14
|
+
import { DatabaseSync } from "node:sqlite";
|
|
15
|
+
import { join } from "node:path";
|
|
16
|
+
import { existsSync, mkdirSync, readFileSync } from "node:fs";
|
|
17
|
+
|
|
18
|
+
// Basename of the per-workspace SQLite database file; lives under <ws>/memory/.
const DB_FILENAME = "memory.sqlite";

/** Per-workspace database handles. Keyed by workspace path; populated lazily by getDb(). */
const handles = new Map();
|
|
22
|
+
|
|
23
|
+
/** Absolute path of the SQLite database file for workspace `ws`. */
function dbPath(ws) {
  return join(ws, "memory", DB_FILENAME);
}
|
|
24
|
+
|
|
25
|
+
/**
 * Open (or reuse) the per-workspace SQLite handle.
 * Creates the memory/ directory and schema on first use; subsequent calls
 * for the same workspace return the cached DatabaseSync instance.
 */
function getDb(ws) {
  const cached = handles.get(ws);
  if (cached) return cached;

  mkdirSync(join(ws, "memory"), { recursive: true });
  const db = new DatabaseSync(dbPath(ws));
  // WAL + NORMAL: better concurrent-read behavior and fewer fsyncs.
  db.exec("PRAGMA journal_mode=WAL");
  db.exec("PRAGMA synchronous=NORMAL");
  initSchema(db);
  handles.set(ws, db);
  return db;
}
|
|
35
|
+
|
|
36
|
+
/**
 * Create all tables, FTS5 mirrors, triggers, and indexes (idempotent via
 * IF NOT EXISTS), then record the schema version in `meta`.
 *
 * archival_fts / episodes_fts are external-content FTS5 tables backed by
 * their base tables; the archival_* triggers keep archival_fts in sync on
 * insert/delete/update.
 * NOTE(review): episodes_fts only has an AFTER INSERT trigger — presumably
 * episodes are append-only; confirm no caller deletes/updates episodes.
 */
function initSchema(db) {
  db.exec(`
    CREATE TABLE IF NOT EXISTS archival (
      id TEXT PRIMARY KEY,
      ts TEXT,
      content TEXT NOT NULL,
      entity TEXT DEFAULT '',
      tags TEXT DEFAULT '[]',
      importance INTEGER DEFAULT 5,
      last_accessed TEXT,
      access_count INTEGER DEFAULT 0,
      source TEXT DEFAULT '',
      updated_at TEXT
    );
    CREATE VIRTUAL TABLE IF NOT EXISTS archival_fts USING fts5(
      content, entity, tags_text, content='archival', content_rowid='rowid'
    );
    CREATE TRIGGER IF NOT EXISTS archival_ai AFTER INSERT ON archival BEGIN
      INSERT INTO archival_fts(rowid, content, entity, tags_text)
      VALUES (new.rowid, new.content, new.entity, new.tags);
    END;
    CREATE TRIGGER IF NOT EXISTS archival_ad AFTER DELETE ON archival BEGIN
      INSERT INTO archival_fts(archival_fts, rowid, content, entity, tags_text)
      VALUES ('delete', old.rowid, old.content, old.entity, old.tags);
    END;
    CREATE TRIGGER IF NOT EXISTS archival_au AFTER UPDATE ON archival BEGIN
      INSERT INTO archival_fts(archival_fts, rowid, content, entity, tags_text)
      VALUES ('delete', old.rowid, old.content, old.entity, old.tags);
      INSERT INTO archival_fts(rowid, content, entity, tags_text)
      VALUES (new.rowid, new.content, new.entity, new.tags);
    END;

    CREATE TABLE IF NOT EXISTS graph (
      id TEXT PRIMARY KEY,
      subject TEXT NOT NULL,
      relation TEXT NOT NULL,
      object TEXT NOT NULL,
      ts TEXT,
      source TEXT
    );
    CREATE INDEX IF NOT EXISTS idx_graph_subject ON graph(subject COLLATE NOCASE);
    CREATE INDEX IF NOT EXISTS idx_graph_object ON graph(object COLLATE NOCASE);

    CREATE TABLE IF NOT EXISTS episodes (
      id TEXT PRIMARY KEY,
      ts TEXT,
      summary TEXT NOT NULL,
      decisions TEXT DEFAULT '[]',
      mood TEXT DEFAULT '',
      topics TEXT DEFAULT '[]',
      participants TEXT DEFAULT '[]',
      duration_minutes INTEGER
    );
    CREATE VIRTUAL TABLE IF NOT EXISTS episodes_fts USING fts5(
      summary, decisions_text, topics_text, content='episodes', content_rowid='rowid'
    );
    CREATE TRIGGER IF NOT EXISTS episodes_ai AFTER INSERT ON episodes BEGIN
      INSERT INTO episodes_fts(rowid, summary, decisions_text, topics_text)
      VALUES (new.rowid, new.summary, new.decisions, new.topics);
    END;

    CREATE TABLE IF NOT EXISTS embeddings (
      record_id TEXT PRIMARY KEY,
      vector BLOB NOT NULL
    );

    CREATE TABLE IF NOT EXISTS meta (
      key TEXT PRIMARY KEY,
      value TEXT
    );
  `);

  // Set schema version
  const stmt = db.prepare("INSERT OR REPLACE INTO meta(key, value) VALUES('schema_version', '1')");
  stmt.run();
}
|
|
112
|
+
|
|
113
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
114
|
+
// Archival operations
|
|
115
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
116
|
+
|
|
117
|
+
/**
 * Load every archival record (oldest first), decoding the JSON `tags` column.
 * @returns {Array<object>} full archival rows with `tags` as an array.
 */
export function sqliteLoadArchival(ws) {
  const db = getDb(ws);
  const records = [];
  for (const row of db.prepare("SELECT * FROM archival ORDER BY ts ASC").all()) {
    records.push({ ...row, tags: JSON.parse(row.tags || "[]") });
  }
  return records;
}
|
|
125
|
+
|
|
126
|
+
/**
 * Insert a new archival record from `entry` ({ content, entity?, tags?,
 * importance?, source? }) and return the stored record (with generated id/ts).
 */
export function sqliteAppendRecord(ws, entry) {
  const db = getDb(ws);
  const record = {
    // Timestamp + random suffix keeps ids unique enough for this store.
    id: `arch-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`,
    ts: new Date().toISOString(),
    content: entry.content,
    entity: entry.entity || "",
    tags: entry.tags || [],
    importance: entry.importance ?? 5,
    last_accessed: null,
    access_count: 0,
    source: entry.source || "",
    updated_at: null,
  };
  const insert = db.prepare(`
    INSERT INTO archival(id, ts, content, entity, tags, importance, last_accessed, access_count, source, updated_at)
    VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);
  insert.run(
    record.id, record.ts, record.content, record.entity, JSON.stringify(record.tags),
    record.importance, record.last_accessed, record.access_count, record.source, record.updated_at,
  );
  return record;
}
|
|
148
|
+
|
|
149
|
+
/**
 * Partially update an archival record: only whitelisted columns present in
 * `updates` are written; `updated_at` is always refreshed.
 */
export function sqliteUpdateRecord(ws, id, updates) {
  const db = getDb(ws);
  const sets = [];
  const vals = [];
  // Column order matches the original hand-rolled if-chain.
  for (const col of ["content", "entity", "tags", "importance", "last_accessed", "access_count"]) {
    if (updates[col] === undefined) continue;
    sets.push(`${col}=?`);
    // tags is stored JSON-encoded; everything else is written as-is.
    vals.push(col === "tags" ? JSON.stringify(updates.tags) : updates[col]);
  }
  sets.push("updated_at=?");
  vals.push(new Date().toISOString());
  vals.push(id);
  db.prepare(`UPDATE archival SET ${sets.join(", ")} WHERE id=?`).run(...vals);
}
|
|
163
|
+
|
|
164
|
+
/**
 * Delete an archival record and its embedding.
 * @returns {string|null} the deleted record's content, or null if not found.
 */
export function sqliteDeleteRecord(ws, id) {
  const db = getDb(ws);
  const existing = db.prepare("SELECT content FROM archival WHERE id=?").get(id);
  if (existing === undefined) return null;
  db.prepare("DELETE FROM archival WHERE id=?").run(id);
  db.prepare("DELETE FROM embeddings WHERE record_id=?").run(id);
  return existing.content;
}
|
|
172
|
+
|
|
173
|
+
/**
 * FTS5 keyword search over archival, falling back to a LIKE scan when the
 * query is not valid FTS5 syntax.
 * @returns {Array<{ record, ftsScore }>} records with decoded tags.
 */
export function sqliteFtsSearch(ws, query, topK = 5) {
  const db = getDb(ws);
  const hydrate = (r) => ({ ...r, tags: JSON.parse(r.tags || "[]") });
  try {
    const matches = db.prepare(`
      SELECT a.*, rank
      FROM archival_fts fts
      JOIN archival a ON a.rowid = fts.rowid
      WHERE archival_fts MATCH ?
      ORDER BY rank
      LIMIT ?
    `).all(query, topK * 2); // over-fetch for re-ranking
    // FTS5 rank is negative (lower = better); negate so higher = better.
    return matches.map((r) => ({ record: hydrate(r), ftsScore: -r.rank }));
  } catch {
    // FTS5 query syntax error — fall back to LIKE
    const likeQuery = `%${query}%`;
    const matches = db.prepare(`
      SELECT * FROM archival
      WHERE content LIKE ? OR entity LIKE ?
      ORDER BY ts DESC
      LIMIT ?
    `).all(likeQuery, likeQuery, topK);
    // No ranking information in the fallback path; use a constant score.
    return matches.map((r) => ({ record: hydrate(r), ftsScore: 1 }));
  }
}
|
|
208
|
+
|
|
209
|
+
/** Number of archival records in the workspace store. */
export function sqliteArchivalCount(ws) {
  const row = getDb(ws).prepare("SELECT COUNT(*) as cnt FROM archival").get();
  return row.cnt;
}
|
|
213
|
+
|
|
214
|
+
/**
 * Aggregate statistics across all memory stores.
 * @returns {{ total, embCount, graphCount, episodeCount, entities }}
 */
export function sqliteArchivalStats(ws) {
  const db = getDb(ws);
  const countRows = (table) => db.prepare(`SELECT COUNT(*) as cnt FROM ${table}`).get().cnt;

  const total = countRows("archival");
  const embCount = countRows("embeddings");
  const graphCount = countRows("graph");
  const episodeCount = countRows("episodes");

  // Top 10 most frequently referenced non-empty entities.
  const entities = db.prepare(`
    SELECT entity, COUNT(*) as cnt FROM archival
    WHERE entity != '' GROUP BY entity ORDER BY cnt DESC LIMIT 10
  `).all();

  return { total, embCount, graphCount, episodeCount, entities };
}
|
|
228
|
+
|
|
229
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
230
|
+
// Graph operations
|
|
231
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
232
|
+
|
|
233
|
+
/**
 * Insert a (subject, relation, object) triple unless an equivalent triple
 * already exists (compared case-insensitively).
 * @returns {{id, s, r, o}|null} the stored triple, or null on duplicate.
 */
export function sqliteAddTriple(ws, subject, relation, object, sourceId = null) {
  const db = getDb(ws);
  const duplicate = db.prepare(
    "SELECT id FROM graph WHERE subject=? COLLATE NOCASE AND relation=? COLLATE NOCASE AND object=? COLLATE NOCASE"
  ).get(subject, relation, object);
  if (duplicate !== undefined) return null;

  const id = `tri-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`;
  db.prepare("INSERT INTO graph(id, subject, relation, object, ts, source) VALUES(?,?,?,?,?,?)")
    .run(id, subject, relation, object, new Date().toISOString(), sourceId);
  return { id, s: subject, r: relation, o: object };
}
|
|
245
|
+
|
|
246
|
+
/**
 * Depth-first traversal of the knowledge graph starting at `entity`,
 * following edges in either direction (case-insensitive node matching),
 * optionally restricted to one `relation`, to at most `depth` hops.
 *
 * @param {string} entity - starting node.
 * @param {string|null} relation - if set, only edges with this relation.
 * @param {number} depth - maximum hop count (default 2).
 * @returns {Array<{ path, node, triple }>} one entry per edge visited, in
 *   DFS discovery order; `path` is the human-readable route taken.
 */
export function sqliteQueryGraph(ws, entity, relation = null, depth = 2) {
  const db = getDb(ws);
  const results = [];
  // Keyed by node:depth so the same node may be re-expanded at a different
  // depth, but infinite cycles at one depth are cut off.
  const visited = new Set();

  // Fix: statements were previously prepared inside traverse(), i.e. re-compiled
  // on every recursive call. Prepare each once up front; behavior is unchanged.
  const byNodeAndRelation = db.prepare(
    "SELECT * FROM graph WHERE (subject=? COLLATE NOCASE OR object=? COLLATE NOCASE) AND relation=? COLLATE NOCASE"
  );
  const byNode = db.prepare(
    "SELECT * FROM graph WHERE subject=? COLLATE NOCASE OR object=? COLLATE NOCASE"
  );

  function traverse(current, d, path) {
    if (d > depth) return;
    const key = `${current}:${d}`;
    if (visited.has(key)) return;
    visited.add(key);

    const rows = relation
      ? byNodeAndRelation.all(current, current, relation)
      : byNode.all(current, current);

    for (const t of rows) {
      // Edge direction relative to the node we arrived from.
      const isForward = t.subject.toLowerCase() === current.toLowerCase();
      const node = isForward ? t.object : t.subject;
      const dir = isForward ? `--${t.relation}-->` : `<--${t.relation}--`;
      results.push({ path: [...path, dir], node, triple: { id: t.id, s: t.subject, r: t.relation, o: t.object } });
      traverse(node, d + 1, [...path, dir, node]);
    }
  }

  traverse(entity, 1, [entity]);
  return results;
}
|
|
280
|
+
|
|
281
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
282
|
+
// Episode operations
|
|
283
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
284
|
+
|
|
285
|
+
/**
 * Persist an episode ({ summary, decisions?, mood?, topics?, participants?,
 * duration_minutes? }) and return it with generated id/ts.
 */
export function sqliteSaveEpisode(ws, ep) {
  const db = getDb(ws);
  const id = `ep-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`;
  // Fix: compute the timestamp once so the stored row and the returned
  // object always agree (two separate Date() calls could differ by a tick).
  const ts = new Date().toISOString();
  db.prepare(`
    INSERT INTO episodes(id, ts, summary, decisions, mood, topics, participants, duration_minutes)
    VALUES(?,?,?,?,?,?,?,?)
  `).run(id, ts, ep.summary,
    JSON.stringify(ep.decisions || []), ep.mood || "",
    JSON.stringify(ep.topics || []), JSON.stringify(ep.participants || []),
    ep.duration_minutes || null);
  return { id, type: "episode", ts, ...ep };
}
|
|
297
|
+
|
|
298
|
+
/**
 * Search episodes. With no query, returns the `lastN` most recent episodes;
 * otherwise tries an FTS5 match, falling back to a LIKE scan on `summary`
 * when the query is not valid FTS5 syntax.
 * @returns {Array<object>} episode rows with JSON columns decoded.
 */
export function sqliteSearchEpisodes(ws, query, lastN = 5) {
  const db = getDb(ws);
  // Decomposition: the same JSON-column decoding was duplicated three times.
  const hydrate = (r) => ({
    ...r,
    decisions: JSON.parse(r.decisions || "[]"),
    topics: JSON.parse(r.topics || "[]"),
    participants: JSON.parse(r.participants || "[]"),
  });

  if (!query) {
    return db.prepare("SELECT * FROM episodes ORDER BY ts DESC LIMIT ?")
      .all(lastN).map(hydrate);
  }
  try {
    return db.prepare(`
      SELECT e.* FROM episodes_fts fts
      JOIN episodes e ON e.rowid = fts.rowid
      WHERE episodes_fts MATCH ?
      ORDER BY rank LIMIT ?
    `).all(query, lastN).map(hydrate);
  } catch {
    // FTS5 query syntax error — degrade to a substring match on summary.
    return db.prepare("SELECT * FROM episodes WHERE summary LIKE ? ORDER BY ts DESC LIMIT ?")
      .all(`%${query}%`, lastN).map(hydrate);
  }
}
|
|
327
|
+
|
|
328
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
329
|
+
// Embedding operations
|
|
330
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
331
|
+
|
|
332
|
+
/**
 * Load the embedding vector for `recordId`.
 * @returns {number[]|null} the float vector, or null when none is stored.
 */
export function sqliteGetEmbedding(ws, recordId) {
  const db = getDb(ws);
  const row = db.prepare("SELECT vector FROM embeddings WHERE record_id=?").get(recordId);
  if (!row) return null;
  // Fix: the BLOB is returned as a Uint8Array *view*; wrapping its whole
  // backing ArrayBuffer ignores byteOffset/byteLength and can read garbage
  // (or throw) when the view doesn't start at offset 0. Honor the view.
  const v = row.vector;
  return Array.from(new Float32Array(v.buffer, v.byteOffset, v.byteLength / Float32Array.BYTES_PER_ELEMENT));
}
|
|
338
|
+
|
|
339
|
+
/** Store (or replace) the embedding for `recordId` as a Float32 BLOB. */
export function sqliteSaveEmbedding(ws, recordId, vector) {
  const db = getDb(ws);
  const blob = Buffer.from(new Float32Array(vector).buffer);
  db.prepare("INSERT OR REPLACE INTO embeddings(record_id, vector) VALUES(?,?)").run(recordId, blob);
}
|
|
344
|
+
|
|
345
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
346
|
+
// Migration: JSONL → SQLite
|
|
347
|
+
// ═══════════════════════════════════════════════════════════════════
|
|
348
|
+
|
|
349
|
+
/**
 * One-shot migration of legacy JSONL/JSON memory files into SQLite.
 * Missing files are skipped; malformed lines are silently dropped (best-effort
 * import by design). INSERT OR IGNORE makes the migration idempotent on ids.
 * @returns {{archival, graph, episodes, embeddings}} count of lines imported
 *   per store (counts parsed lines, including ones IGNOREd as duplicates).
 */
export function migrateFromJsonl(ws) {
  const db = getDb(ws);
  const memDir = join(ws, "memory");
  // const is sufficient: only the properties are mutated, never the binding.
  const imported = { archival: 0, graph: 0, episodes: 0, embeddings: 0 };

  // Decomposition: the exists/read/trim/split/filter pipeline was duplicated
  // for each store; [] when the file does not exist.
  const readJsonlLines = (file) =>
    existsSync(file) ? readFileSync(file, "utf-8").trim().split("\n").filter(Boolean) : [];

  // Archival
  const archivalLines = readJsonlLines(join(memDir, "archival.jsonl"));
  if (archivalLines.length > 0) {
    const insert = db.prepare(`
      INSERT OR IGNORE INTO archival(id, ts, content, entity, tags, importance, last_accessed, access_count, source, updated_at)
      VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `);
    for (const line of archivalLines) {
      try {
        const r = JSON.parse(line);
        insert.run(r.id, r.ts, r.content, r.entity || "", JSON.stringify(r.tags || []),
          r.importance ?? 5, r.last_accessed, r.access_count || 0, r.source || "", r.updated_at || null);
        imported.archival++;
      } catch { /* skip bad lines */ }
    }
  }

  // Graph
  const graphLines = readJsonlLines(join(memDir, "graph.jsonl"));
  if (graphLines.length > 0) {
    const insert = db.prepare("INSERT OR IGNORE INTO graph(id, subject, relation, object, ts, source) VALUES(?,?,?,?,?,?)");
    for (const line of graphLines) {
      try {
        const t = JSON.parse(line);
        insert.run(t.id, t.s, t.r, t.o, t.ts, t.source);
        imported.graph++;
      } catch { /* skip */ }
    }
  }

  // Episodes
  const episodeLines = readJsonlLines(join(memDir, "episodes.jsonl"));
  if (episodeLines.length > 0) {
    const insert = db.prepare(`
      INSERT OR IGNORE INTO episodes(id, ts, summary, decisions, mood, topics, participants, duration_minutes)
      VALUES(?,?,?,?,?,?,?,?)
    `);
    for (const line of episodeLines) {
      try {
        const e = JSON.parse(line);
        insert.run(e.id, e.ts, e.summary, JSON.stringify(e.decisions || []),
          e.mood || "", JSON.stringify(e.topics || []), JSON.stringify(e.participants || []),
          e.duration_minutes || null);
        imported.episodes++;
      } catch { /* skip */ }
    }
  }

  // Embeddings: a single JSON map of record_id -> number[].
  const embFile = join(memDir, "archival.embeddings.json");
  if (existsSync(embFile)) {
    try {
      const data = JSON.parse(readFileSync(embFile, "utf-8"));
      const insert = db.prepare("INSERT OR IGNORE INTO embeddings(record_id, vector) VALUES(?,?)");
      for (const [id, vec] of Object.entries(data)) {
        insert.run(id, Buffer.from(new Float32Array(vec).buffer));
        imported.embeddings++;
      }
    } catch { /* skip */ }
  }

  return imported;
}
|
|
421
|
+
|
|
422
|
+
export { dbPath };
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
{
|
|
2
|
+
"id": "memory-engine",
|
|
3
|
+
"name": "Memory Engine",
|
|
4
|
+
"description": "MemGPT-style hierarchical memory: core memory block + archival storage with semantic search, knowledge graph, episodic memory, and dashboard",
|
|
5
|
+
"configSchema": {
|
|
6
|
+
"type": "object",
|
|
7
|
+
"properties": {
|
|
8
|
+
"workspace": {
|
|
9
|
+
"type": "string",
|
|
10
|
+
"description": "Path to workspace directory"
|
|
11
|
+
},
|
|
12
|
+
"coreSizeLimit": {
|
|
13
|
+
"type": "number",
|
|
14
|
+
"description": "Max bytes for core.json (default: 3072)"
|
|
15
|
+
},
|
|
16
|
+
"sharing": {
|
|
17
|
+
"type": "boolean",
|
|
18
|
+
"description": "Enable cross-agent memory sharing (default: true). Set false for privacy-sensitive multi-user setups."
|
|
19
|
+
}
|
|
20
|
+
},
|
|
21
|
+
"additionalProperties": false
|
|
22
|
+
}
|
|
23
|
+
}
|
package/package.json
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@icex-labs/openclaw-memory-engine",
|
|
3
|
+
"version": "3.3.1",
|
|
4
|
+
"description": "MemGPT-style hierarchical memory plugin for OpenClaw — core memory block + archival storage with semantic search",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "index.js",
|
|
7
|
+
"openclaw": {
|
|
8
|
+
"extensions": ["./index.js"]
|
|
9
|
+
},
|
|
10
|
+
"keywords": [
|
|
11
|
+
"openclaw",
|
|
12
|
+
"openclaw-plugin",
|
|
13
|
+
"memory",
|
|
14
|
+
"memgpt",
|
|
15
|
+
"letta",
|
|
16
|
+
"archival",
|
|
17
|
+
"ai-memory"
|
|
18
|
+
],
|
|
19
|
+
"author": "icex-labs",
|
|
20
|
+
"license": "MIT",
|
|
21
|
+
"repository": {
|
|
22
|
+
"type": "git",
|
|
23
|
+
"url": "git+https://github.com/icex-labs/openclaw-memory-engine.git"
|
|
24
|
+
},
|
|
25
|
+
"homepage": "https://github.com/icex-labs/openclaw-memory-engine#readme",
|
|
26
|
+
"engines": {
|
|
27
|
+
"node": ">=22"
|
|
28
|
+
},
|
|
29
|
+
"peerDependencies": {
|
|
30
|
+
"openclaw": ">=2026.3.0"
|
|
31
|
+
},
|
|
32
|
+
"files": [
|
|
33
|
+
"index.js",
|
|
34
|
+
"lib/",
|
|
35
|
+
"extras/",
|
|
36
|
+
"setup.sh",
|
|
37
|
+
"openclaw.plugin.json",
|
|
38
|
+
"README.md"
|
|
39
|
+
]
|
|
40
|
+
}
|