mr-memory 3.7.0 → 3.7.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/README.md +4 -18
  2. package/index.js +1223 -0
  3. package/package.json +9 -6
  4. package/upload.js +469 -0
package/package.json CHANGED
@@ -1,13 +1,15 @@
1
1
  {
2
2
  "name": "mr-memory",
3
- "version": "3.7.0",
4
- "description": "MemoryRouter persistent memory plugin for OpenClaw — your AI remembers every conversation",
3
+ "version": "3.7.1",
4
+ "description": "mr-memory is the MemoryRouter plugin for OpenClaw — persistent memory across every conversation",
5
5
  "type": "module",
6
6
  "files": [
7
7
  "README.md",
8
+ "index.js",
9
+ "upload.js",
8
10
  "index.ts",
9
- "openclaw.plugin.json",
10
- "upload.ts"
11
+ "upload.ts",
12
+ "openclaw.plugin.json"
11
13
  ],
12
14
  "keywords": [
13
15
  "openclaw",
@@ -32,7 +34,8 @@
32
34
  },
33
35
  "openclaw": {
34
36
  "extensions": [
35
- "./index.ts"
37
+ "./index.js"
36
38
  ]
37
- }
39
+ },
40
+ "main": "./index.js"
38
41
  }
package/upload.js ADDED
@@ -0,0 +1,469 @@
1
+ /**
2
+ * MemoryRouter Upload — File discovery, parsing, batching, and upload.
3
+ */
4
+ import fs from "node:fs/promises";
5
+ import path from "node:path";
6
// ── Sanitization utilities (shared with index.ts ingest logic)
// These MUST stay in sync with the patterns in index.ts
const MEMORY_TAG_RE = /<mr-memory>[\s\S]*?<\/mr-memory>\s*/g;
const LEGACY_TAG_RE = /<memory_context>[\s\S]*?<\/memory_context>\s*(?:The above are retrieved memories|IMPORTANT: The above block contains retrieved memories)[^\n]*\n*/g;
/**
 * Remove previously injected memory blocks from a message — both the current
 * <mr-memory> format and the legacy <memory_context> format (including its
 * trailing instruction line) — then trim surrounding whitespace.
 */
function stripOldMemory(text) {
  const withoutCurrent = text.replace(MEMORY_TAG_RE, "");
  const withoutLegacy = withoutCurrent.replace(LEGACY_TAG_RE, "");
  return withoutLegacy.trim();
}
13
const ENVELOPE_METADATA_RE = /Conversation info \(untrusted metadata\):\s*```json\s*\{[^}]*\}\s*```\s*/g;
const SENDER_METADATA_RE = /Sender \(untrusted metadata\):\s*```json\s*\{[^}]*\}\s*```\s*/g;
const REPLY_CONTEXT_RE = /Replied message \(untrusted, for context\):\s*```json\s*\{[^}]*\}\s*```\s*/g;
const MEMORY_INSTRUCTION_RE = /The above are retrieved memories from past conversations[^\n]*\n*/g;
const IMPORTANT_MEMORY_RE = /IMPORTANT: The above (?:are retrieved memories|block contains retrieved memories)[^\n]*\n*/g;
const MEDIA_ATTACHED_RE = /\[media attached[^\]]*\]/gi;
const MEDIA_TAGS_RE = /<media:(?:audio|image|video|document)>/gi;
const MEDIA_INSTRUCTION_RE = /To send an image back,[\s\S]*?Keep caption in the text body\./gi;
const MEDIA_INBOUND_PATH_RE = /\/Users\/[^\s]*\/media\/inbound\/[^\s)\]"]*/gi;
const IMAGE_DATA_RE = /\[image data removed[^\]]*\]/gi;
/**
 * Scrub media-related noise from a message: drop "how to send an image"
 * instructions and media tags entirely, replace attachment markers and local
 * inbound-media paths with a neutral placeholder, and collapse the blank-line
 * runs those removals leave behind.
 */
function stripMediaReferences(text) {
  let out = text.replace(MEDIA_INSTRUCTION_RE, "");
  out = out.replace(MEDIA_ATTACHED_RE, "[media reference removed]");
  out = out.replace(MEDIA_TAGS_RE, "");
  out = out.replace(MEDIA_INBOUND_PATH_RE, "[media reference removed]");
  out = out.replace(IMAGE_DATA_RE, "");
  return out.replace(/\n{3,}/g, "\n\n");
}
32
// Embedded noise substrings — strip from within larger messages
// Full "Post-Compaction Audit" system block, from its "System: [...]" header
// through either of its two known terminating phrases.
const POST_COMPACTION_AUDIT_RE = /System: \[[^\]]*\] ⚠️ Post-Compaction Audit:[\s\S]*?(?:after memory compaction\.\s*|before continuing\.\s*)/g;
const READ_TOOL_INSTRUCTION_RE = /Please read them now using the Read tool before continuing\.[\s\S]*?after memory compaction\.\s*/g;
// Chat-transport envelope noise: "[Telegram ...]" prefixes and "[message_id: N]" tags.
const TELEGRAM_ENVELOPE_RE = /\[Telegram [^\]]+\]\s*/g;
const MESSAGE_ID_TAG_RE = /\[message_id: \d+\]\s*/g;
/**
 * Scrub one raw session message before upload.
 *
 * Applies, in order: memory-injection removal (stripOldMemory), media-noise
 * removal (stripMediaReferences), then a long chain of regex removals for
 * envelope metadata, retrieval-instruction footers, compaction/flush blocks,
 * heartbeat/NO_REPLY protocol chatter, and orphaned tag fragments. The order
 * of the replacements matters — e.g. orphan-fragment cleanup assumes paired
 * <mr-memory> blocks were already removed by stripOldMemory.
 *
 * @param {string} text - Raw message text.
 * @returns {string} Cleaned text with blank-line runs collapsed and trimmed;
 *   may be empty if the message was pure noise.
 */
function sanitizeForUpload(text) {
  let cleaned = stripOldMemory(text);
  cleaned = stripMediaReferences(cleaned);
  cleaned = cleaned
    .replace(ENVELOPE_METADATA_RE, "")
    .replace(SENDER_METADATA_RE, "")
    .replace(REPLY_CONTEXT_RE, "")
    .replace(MEMORY_INSTRUCTION_RE, "")
    .replace(IMPORTANT_MEMORY_RE, "")
    .replace(POST_COMPACTION_AUDIT_RE, "")
    .replace(READ_TOOL_INSTRUCTION_RE, "")
    .replace(TELEGRAM_ENVELOPE_RE, "")
    .replace(MESSAGE_ID_TAG_RE, "")
    // ── Orphaned memory injection fragments (v2.16.0) ──
    // When stripOldMemory removes <mr-memory>...</mr-memory> pairs, nested/duplicate
    // injections from compaction can leave behind orphaned closing fragments.
    // 1. Orphaned </memory_context> + instruction footer + </mr-memory>
    .replace(/<\/memory_context>\s*(?:(?:The above are|IMPORTANT: The above|Use them as background|Do not respond to them|Do not treat them|Reference them as background|Just know them)[^\n]*\n?\s*)*(?:<\/mr-memory>\s*)?/g, "")
    // 2. Standalone orphaned </mr-memory> closing tags
    .replace(/<\/mr-memory>\s*/g, "")
    // 3. Box-drawing instruction diagram (orphaned when opening <mr-memory> was stripped)
    .replace(/┌─+┐[\s\S]*?└─+┘\s*/g, "")
    // ── Post-Compaction Audit full blocks (start→end boundary) ──
    // Start: "System: [date] ⚠️ Post-Compaction Audit:"
    // End: "after memory compaction." OR "before continuing."
    // (already handled by POST_COMPACTION_AUDIT_RE above)
    // ── Pre-compaction flush full blocks (start→end boundary) ──
    // Start: "Pre-compaction memory flush"
    // End: timezone "(America/...)" on Current time line, or end of flush instruction
    .replace(/Pre-compaction memory flush\.?\s*(?:Store durable memories now[^\n]*\n?\s*)?(?:IMPORTANT:[^\n]*\n?\s*)?(?:If the file already[^\n]*\n?\s*)?(?:If nothing to store[^\n]*\n?\s*)?(?:Current time:[^\n]*\(America\/[^)]+\)\s*)?/g, "")
    // Embedded noise substrings — v2.15.0
    .replace(/`?HEARTBEAT_OK`?\s*/g, "")
    .replace(/Read HEARTBEAT\.md[^\n]*(?:\n(?:Follow it strictly|Do not infer|If nothing needs)[^\n]*)*/g, "")
    .replace(/\n*\s*NO_REPLY\s*$/g, "")
    .replace(/You can respond with NO_REPLY[^\n]*\n*/g, "")
    .replace(/If nothing to store,? reply (?:with )?NO_REPLY[^\n]*\n*/g, "")
    .replace(/Store durable memories now[^\n]*\n*/g, "")
    .replace(/This ensures your operating protocols are restored[^\n]*\n*/g, "")
    .replace(/Current time:[^\n]*\n*/g, "")
    .replace(/\bset:\s*\n\s*-\s*WORKFLOW_AUTO\.md[^\n]*\n(?:\s*-\s*memory[^\n]*\n)*/g, "")
    .replace(/Do not mention technical details[^\n]*\n*/g, "");
  // Final pass: collapse blank-line runs left by the removals above.
  cleaned = cleaned.replace(/\n{3,}/g, "\n\n").trim();
  return cleaned;
}
81
// Whole-message patterns: a message matching ANY of these is pure system/
// protocol traffic (heartbeats, compaction prompts, cron jobs, sub-agent
// chatter) and carries no conversational value worth uploading.
const SYSTEM_NOISE_PATTERNS = [
  /^Read HEARTBEAT\.md if it exists/,
  /^Read HEARTBEAT\.md/,
  /HEARTBEAT_OK\s*$/,
  /^Pre-compaction memory flush/,
  /⚠️ Post-Compaction Audit:/,
  /^NO_REPLY\s*$/,
  /^Note: The previous agent run was aborted/,
  /^\[Queued messages while agent was busy\]/,
  /^\[media reference removed\]\s*$/,
  /^System: \[\d{4}-\d{2}-\d{2}/,
  /^\[\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2} [A-Z]{2,4}\] ⚠️/,
  /^Store durable memories now/,
  /^Current time: .+\(America\//,
  /^A new session was started via \/new or \/reset/,
  /^Please read them now using the Read tool/,
  /^GatewayRestart:\s*\{/,
  /^This ensures your operating protocols are restored/,
  // Cron job prompts — automated system instructions, not human conversation
  /^\[cron:[^\]]+\]/,
  // Sub-agent completion announcements (system-generated)
  /^A background task "[^"]*" just completed/,
  // Sub-agent stats lines
  /^Stats: runtime \d+/,
  // Summarize instruction for sub-agent completions
  /^Summarize this naturally for the user/,
  // Session reset greet prompts
  /^Greet the user in your configured persona/,
];
/**
 * Decide whether a (trimmed) message is system noise that should be skipped
 * entirely. Empty/whitespace-only messages count as noise.
 */
function isSystemNoise(text) {
  const trimmed = text.trim();
  if (trimmed.length === 0) {
    return true;
  }
  for (const pattern of SYSTEM_NOISE_PATTERNS) {
    if (pattern.test(trimmed)) {
      return true;
    }
  }
  return false;
}
116
/**
 * Detect OpenClaw compaction summary blobs — large user messages containing
 * interleaved [USER] and [ASSISTANT] markers from conversation history dumps.
 * These are 100% redundant since individual messages are already uploaded.
 *
 * A message qualifies only when it is user-role, at least 2000 characters,
 * and contains three or more role markers.
 */
function isCompactionSummary(text, role) {
  const looksLikeDump = role === "user" && text.length >= 2000;
  if (!looksLikeDump) {
    return false;
  }
  const markerMatches = text.match(/\[(?:USER|ASSISTANT)[\] ]/g) ?? [];
  return markerMatches.length >= 3;
}
129
// Upload sizing/pacing knobs.
const MAX_ITEM_CHARS = 8000;        // messages longer than this get chunked
const TARGET_CHUNK_CHARS = 4000;    // preferred chunk size when splitting
const MAX_BATCH_BYTES = 2_000_000;  // hard cap on a single upload body
const MAX_BATCH_COUNT_DEFAULT = 100;
const MAX_BATCH_COUNT_QWEN = 25;    // Qwen's 4096-dim vectors are heavier
const BATCH_SLEEP_MS = 150;         // pause between consecutive batches
const MAX_HTTP_RETRIES = 3;
/** Promise-based delay helper: resolves after `ms` milliseconds. */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
139
/**
 * Split long text into chunks of roughly `targetChars` characters, preferring
 * natural boundaries in this order: paragraph break ("\n\n"), line break,
 * word break. A candidate boundary is only accepted if it lands in the later
 * half of the window (later 70% for word breaks); otherwise we fall back to a
 * hard cut at exactly `targetChars`. Each chunk is trimmed; a non-empty tail
 * shorter than the target becomes the final chunk.
 */
function chunkText(text, targetChars) {
  const pickSplit = (s) => {
    let at = s.lastIndexOf("\n\n", targetChars);
    if (at < targetChars * 0.5) {
      at = s.lastIndexOf("\n", targetChars);
    }
    if (at < targetChars * 0.5) {
      at = s.lastIndexOf(" ", targetChars);
    }
    // No acceptable boundary: hard cut at the target size.
    return at < targetChars * 0.3 ? targetChars : at;
  };
  const chunks = [];
  let rest = text;
  while (rest.length > targetChars) {
    const at = pickSplit(rest);
    chunks.push(rest.slice(0, at).trim());
    rest = rest.slice(at).trim();
  }
  if (rest) {
    chunks.push(rest);
  }
  return chunks;
}
161
// NOTE(review): BINARY_EXTENSIONS appears unused within this file — kept for
// interface stability in case index.js imports it; confirm before removing.
const BINARY_EXTENSIONS = new Set([".pdf", ".docx"]);
const DOC_EXTENSIONS = new Set([".md", ".pdf", ".docx"]);
/**
 * Extract plain text from a file. PDFs go through `pdf-parse`, .docx through
 * `mammoth` (both loaded lazily so plain-text uploads never pay the import
 * cost); anything else is read as UTF-8. Extraction failures are logged and
 * yield an empty string rather than throwing.
 */
async function extractText(filePath) {
  const ext = path.extname(filePath).toLowerCase();
  try {
    switch (ext) {
      case ".pdf": {
        const pdfParse = (await import("pdf-parse")).default;
        const buffer = await fs.readFile(filePath);
        const parsed = await pdfParse(buffer);
        return parsed.text;
      }
      case ".docx": {
        const mammoth = await import("mammoth");
        const result = await mammoth.extractRawText({ path: filePath });
        return result.value;
      }
    }
  }
  catch (err) {
    console.error(` ⚠ Failed to extract ${path.basename(filePath)}: ${err instanceof Error ? err.message : err}`);
    return "";
  }
  return fs.readFile(filePath, "utf-8");
}
182
/**
 * Convert a document file into JSONL upload items.
 *
 * Extracts the file's text (extractText handles .pdf/.docx; everything else
 * is read as UTF-8), stamps each item with the file's mtime, and prefixes the
 * content with the source filename so retrieved memories can be traced back
 * to their origin. Documents longer than MAX_ITEM_CHARS are chunked.
 *
 * @param {string} filePath - Path to the document.
 * @returns {Promise<Array<{content: string, role: string, timestamp: number}>>}
 *   Empty array for near-empty files (< 50 chars after trimming).
 */
async function fileToJsonl(filePath) {
  const content = await extractText(filePath);
  const stat = await fs.stat(filePath);
  // Use the file's modification time so memories sort by document age.
  const timestamp = Math.floor(stat.mtimeMs);
  const filename = path.basename(filePath);
  const trimmed = content.trim();
  if (trimmed.length < 50)
    return [];
  if (trimmed.length <= MAX_ITEM_CHARS) {
    // Bug fix: `filename` was computed but never used — every upload carried
    // the literal junk label "[$(unknown)]" instead of the source filename.
    return [{ content: `[${filename}] ${trimmed}`, role: "user", timestamp }];
  }
  const chunks = chunkText(trimmed, TARGET_CHUNK_CHARS);
  return chunks.map((chunk, i) => ({
    content: `[${filename} part ${i + 1}/${chunks.length}] ${chunk}`,
    role: "user",
    timestamp,
  }));
}
200
/**
 * Convert an OpenClaw session transcript (.jsonl) into upload items.
 *
 * Each transcript line is parsed as JSON; only `type: "message"` entries with
 * a user/assistant role survive. Message text is pulled from either a string
 * `content` or the text blocks of an array `content`, then sanitized and
 * filtered: too-short (< 20 chars), system noise, and compaction-summary
 * dumps are all dropped. Oversized messages are chunked. Invalid JSON lines
 * are silently skipped (a transcript can legitimately contain partial lines).
 *
 * @param {string} filePath - Path to the .jsonl session file.
 * @returns {Promise<Array<{content: string, role: string, timestamp: number}>>}
 */
async function sessionToJsonl(filePath) {
  const content = await fs.readFile(filePath, "utf-8");
  const lines = [];
  for (const line of content.split("\n")) {
    if (!line.trim())
      continue;
    try {
      const parsed = JSON.parse(line);
      if (parsed.type !== "message")
        continue;
      const msg = parsed.message;
      if (!msg || !msg.role)
        continue;
      // Only human/assistant turns are memories; tool/system entries are skipped.
      if (msg.role !== "user" && msg.role !== "assistant")
        continue;
      let text = "";
      if (typeof msg.content === "string") {
        text = msg.content;
      }
      else if (Array.isArray(msg.content)) {
        // Structured content: keep only text blocks, joined by newlines.
        text = msg.content
          .filter((block) => block.type === "text")
          .map((block) => block.text)
          .join("\n");
      }
      if (!text || text.trim().length < 20)
        continue;
      // Sanitize: strip memory injections, envelope metadata, media refs, system noise
      text = sanitizeForUpload(text);
      // Re-check length: sanitization may have reduced the message to noise.
      if (!text || text.trim().length < 20)
        continue;
      if (isSystemNoise(text))
        continue;
      if (isCompactionSummary(text, msg.role))
        continue;
      // Timestamp may be an ISO string or epoch number; fall back to "now".
      let timestamp;
      if (typeof parsed.timestamp === "string") {
        timestamp = new Date(parsed.timestamp).getTime();
      }
      else if (typeof parsed.timestamp === "number") {
        timestamp = parsed.timestamp;
      }
      else {
        timestamp = Date.now();
      }
      const trimmedText = text.trim();
      // Chunk oversized messages to avoid blowing up the embedding service
      if (trimmedText.length > MAX_ITEM_CHARS) {
        const chunks = chunkText(trimmedText, TARGET_CHUNK_CHARS);
        for (const chunk of chunks) {
          lines.push({ content: chunk, role: msg.role, timestamp });
        }
      }
      else {
        lines.push({ content: trimmedText, role: msg.role, timestamp });
      }
    }
    catch {
      // Skip invalid lines
    }
  }
  return lines;
}
263
/**
 * Check whether a filesystem path exists (and is accessible).
 * Never throws — any access error maps to `false`.
 */
async function exists(p) {
  return fs.access(p).then(
    () => true,
    () => false,
  );
}
272
/**
 * Collect uploadable files from an agent workspace, in a stable order:
 * MEMORY.md first, then every document (.md/.pdf/.docx) under memory/
 * (recursively), then AGENTS.md and TOOLS.md if present.
 *
 * @param {string} workspacePath - Workspace root directory.
 * @returns {Promise<string[]>} Absolute paths of discovered files.
 */
async function discoverWorkspaceFiles(workspacePath) {
  // Local existence probe: resolves false instead of throwing.
  const present = (p) => fs.access(p).then(() => true, () => false);
  const files = [];
  const memoryMd = path.join(workspacePath, "MEMORY.md");
  if (await present(memoryMd)) {
    files.push(memoryMd);
  }
  const memoryDir = path.join(workspacePath, "memory");
  if (await present(memoryDir)) {
    const entries = await fs.readdir(memoryDir, { recursive: true });
    const docs = entries.filter((f) => DOC_EXTENSIONS.has(path.extname(f).toLowerCase()));
    files.push(...docs.map((f) => path.join(memoryDir, f)));
  }
  for (const contextFile of ["AGENTS.md", "TOOLS.md"]) {
    const candidate = path.join(workspacePath, contextFile);
    if (await present(candidate)) {
      files.push(candidate);
    }
  }
  return files;
}
293
/**
 * Collect session transcripts from the OpenClaw state directory
 * (`<stateDir>/agents/main/sessions`). Returns both active sessions (.jsonl)
 * and soft-deleted ones (.jsonl.deleted.*) — OpenClaw renames old sessions
 * instead of deleting them, so the data is still valid. Returns an empty
 * array when the sessions directory does not exist.
 */
async function discoverBrainFiles(stateDir) {
  const files = [];
  const sessionsDir = path.join(stateDir, "agents", "main", "sessions");
  const present = await fs.access(sessionsDir).then(() => true, () => false);
  if (present) {
    const names = await fs.readdir(sessionsDir);
    for (const name of names) {
      if (name.endsWith(".jsonl") || name.includes(".jsonl.deleted.")) {
        files.push(path.join(sessionsDir, name));
      }
    }
  }
  return files;
}
305
/**
 * `fetch` with retry on transport errors and 5xx responses.
 *
 * Each attempt gets a fresh 30s timeout. Responses below 500 (including 4xx)
 * are returned immediately — client errors are not retryable. Waits
 * 1s/2s/... (linear backoff) between attempts; after MAX_HTTP_RETRIES
 * failures, throws the last captured error (or a generic one labelled with
 * `label`).
 */
async function fetchWithRetry(url, init, label) {
  let lastError = null;
  let attempt = 0;
  while (attempt < MAX_HTTP_RETRIES) {
    attempt += 1;
    try {
      const res = await fetch(url, { ...init, signal: AbortSignal.timeout(30000) });
      if (res.ok || res.status < 500) {
        return res;
      }
      lastError = new Error(`HTTP ${res.status}`);
    }
    catch (err) {
      lastError = err instanceof Error ? err : new Error(String(err));
    }
    if (attempt < MAX_HTTP_RETRIES) {
      await sleep(1000 * attempt);
    }
  }
  throw lastError ?? new Error(`${label}: failed after ${MAX_HTTP_RETRIES} attempts`);
}
322
/**
 * End-to-end upload pipeline: discover files, convert to JSONL items, batch,
 * and POST to the MemoryRouter upload endpoint.
 *
 * @param {object} params
 * @param {string} params.memoryKey - Bearer token for the upload API.
 * @param {string} params.endpoint - MemoryRouter base URL.
 * @param {string} [params.targetPath] - Explicit file/directory to upload;
 *   overrides the discovery flags.
 * @param {string} params.stateDir - OpenClaw state directory (session source).
 * @param {string} [params.embeddings] - Embedding model override, sent as the
 *   X-Embedding-Model header.
 * @param {string} [params.workspacePath] - Workspace root; defaults to cwd.
 * @param {boolean} [params.hasBrainFlag] - Upload only session transcripts.
 * @param {boolean} [params.hasWorkspaceFlag] - Upload only workspace docs.
 * @returns {Promise<void>} Progress and results are reported via console.
 */
export async function runUpload(params) {
  const { memoryKey, endpoint, targetPath, stateDir, embeddings } = params;
  const uploadUrl = `${endpoint}/v1/memory/upload`;
  // Validate API reachability
  try {
    const res = await fetch(`${endpoint}/health`, { signal: AbortSignal.timeout(5000) });
    if (!res.ok)
      throw new Error(`HTTP ${res.status}`);
  }
  catch {
    // Abort early rather than discovering/parsing files we cannot upload.
    console.error("Error: Could not reach MemoryRouter API.");
    return;
  }
  const workspacePath = params.workspacePath ?? process.cwd();
  // ── File discovery: explicit target > flag-selected sources > both ──
  let files;
  if (targetPath) {
    const resolved = path.resolve(targetPath);
    const stat = await fs.stat(resolved);
    if (stat.isDirectory()) {
      const allDirFiles = await fs.readdir(resolved, { recursive: true });
      files = allDirFiles
        .filter((f) => {
          const ext = path.extname(f).toLowerCase();
          return DOC_EXTENSIONS.has(ext) || ext === ".jsonl";
        })
        .map((f) => path.join(resolved, f));
    }
    else {
      files = [resolved];
    }
  }
  else if (params.hasBrainFlag && !params.hasWorkspaceFlag) {
    // --brain only: upload sessions from brain path
    files = await discoverBrainFiles(stateDir);
  }
  else if (params.hasWorkspaceFlag && !params.hasBrainFlag) {
    // --workspace only: upload workspace files only
    files = await discoverWorkspaceFiles(workspacePath);
  }
  else {
    // No flags or both flags: upload both workspace + sessions
    const wsFiles = await discoverWorkspaceFiles(workspacePath);
    const brainFiles = await discoverBrainFiles(stateDir);
    files = [...wsFiles, ...brainFiles];
  }
  if (files.length === 0) {
    console.log("No files found to upload.");
    return;
  }
  const modelLabel = embeddings ? `(model: ${embeddings})` : "(model: bge)";
  console.log(`Uploading ${files.length} files to MemoryRouter ${modelLabel}...`);
  // ── Parse each file into JSONL items (sessions vs. documents) ──
  const allLines = [];
  let skippedEmpty = 0;
  for (const file of files) {
    const displayName = path.basename(file);
    try {
      const isSession = file.endsWith(".jsonl") || file.includes(".jsonl.deleted.");
      const lines = isSession ? await sessionToJsonl(file) : await fileToJsonl(file);
      if (lines.length === 0) {
        skippedEmpty++;
        continue;
      }
      allLines.push(...lines);
      console.log(` ${displayName.padEnd(40, ".")} ✓ (${lines.length} chunks)`);
    }
    catch (err) {
      // Per-file failures are reported but do not abort the whole upload.
      console.log(` ${displayName.padEnd(40, ".")} ✗ ${err instanceof Error ? err.message : "Error"}`);
    }
  }
  if (skippedEmpty > 0)
    console.log(` Skipped ${skippedEmpty} empty files`);
  if (allLines.length === 0) {
    console.log("\nNo content to upload.");
    return;
  }
  // Batch — smaller batches for Qwen (4096-dim vectors are 4x heavier)
  const isQwen = embeddings && ['qwen', 'qwen3', 'qwen3-8b', 'qwen3-embedding', 'qwen3-embedding-8b'].includes(embeddings.toLowerCase());
  const maxBatchCount = isQwen ? MAX_BATCH_COUNT_QWEN : MAX_BATCH_COUNT_DEFAULT;
  // Greedy batching: close a batch when the next line would exceed the byte
  // cap or the per-batch item count.
  const batches = [];
  let currentBatch = [];
  let currentBytes = 0;
  for (const line of allLines) {
    const lineBytes = JSON.stringify(line).length + 1;
    if (currentBytes + lineBytes > MAX_BATCH_BYTES || currentBatch.length >= maxBatchCount) {
      if (currentBatch.length > 0)
        batches.push(currentBatch);
      currentBatch = [line];
      currentBytes = lineBytes;
    }
    else {
      currentBatch.push(line);
      currentBytes += lineBytes;
    }
  }
  if (currentBatch.length > 0)
    batches.push(currentBatch);
  console.log(`\nSending ${allLines.length} chunks in ${batches.length} batches...`);
  // ── Upload each batch with retry; pace with a short sleep between them ──
  let totalProcessed = 0;
  let totalFailed = 0;
  for (let i = 0; i < batches.length; i++) {
    if (i > 0)
      await sleep(BATCH_SLEEP_MS);
    const batch = batches[i];
    const jsonlBody = batch.map((line) => JSON.stringify(line)).join("\n");
    if (batches.length > 1) {
      process.stdout.write(` Batch ${i + 1}/${batches.length} (${batch.length} items)... `);
    }
    try {
      const response = await fetchWithRetry(uploadUrl, {
        method: "POST",
        headers: {
          Authorization: `Bearer ${memoryKey}`,
          "Content-Type": "text/plain",
          // Only set the model header when an override was requested.
          ...(embeddings && { "X-Embedding-Model": embeddings }),
        },
        body: jsonlBody,
      }, `Batch ${i + 1}`);
      const result = (await response.json());
      // Server stats are optional; fall back to assuming the whole batch stored.
      const batchStored = result.stats?.stored ?? result.stats?.inputItems ?? batch.length;
      const batchFailed = result.stats?.failed ?? 0;
      totalProcessed += batchStored;
      totalFailed += batchFailed;
      if (batchFailed > 0) {
        const errHint = result.errors?.[0] ? ` (${result.errors[0].slice(0, 120)})` : "";
        console.log(`⚠ ${batchStored} stored, ${batchFailed} skipped${errHint}`);
        // Show detailed error diagnostics (up to 5 per batch)
        if (result.errors && result.errors.length > 1) {
          for (const err of result.errors.slice(0, 5)) {
            console.log(` → ${err.slice(0, 200)}`);
          }
          if (result.errors.length > 5)
            console.log(` → ... and ${result.errors.length - 5} more`);
        }
      }
      else {
        console.log(`✓ ${batchStored} stored`);
      }
    }
    catch (err) {
      // All retries exhausted for this batch: count every item as failed.
      console.log(`✗ Failed: ${err instanceof Error ? err.message : String(err)}`);
      totalFailed += batch.length;
    }
  }
  console.log(`\n✅ ${totalProcessed} memories stored in vault`);
  if (totalFailed > 0) {
    console.log(`⚠️ ${totalFailed} failed (${((totalFailed / (totalProcessed + totalFailed)) * 100).toFixed(0)}%)`);
  }
}