openclaw-cortex-memory 0.1.0-Alpha.3 → 0.1.0-Alpha.31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +296 -203
  3. package/SIGNATURE.md +7 -0
  4. package/SKILL.md +92 -268
  5. package/dist/index.d.ts +100 -22
  6. package/dist/index.d.ts.map +1 -1
  7. package/dist/index.js +1249 -1252
  8. package/dist/index.js.map +1 -1
  9. package/dist/openclaw.plugin.json +501 -16
  10. package/dist/src/dedup/three_stage_deduplicator.d.ts +25 -0
  11. package/dist/src/dedup/three_stage_deduplicator.d.ts.map +1 -0
  12. package/dist/src/dedup/three_stage_deduplicator.js +224 -0
  13. package/dist/src/dedup/three_stage_deduplicator.js.map +1 -0
  14. package/dist/src/engine/memory_engine.d.ts +6 -1
  15. package/dist/src/engine/memory_engine.d.ts.map +1 -1
  16. package/dist/src/engine/ts_engine.d.ts +242 -0
  17. package/dist/src/engine/ts_engine.d.ts.map +1 -1
  18. package/dist/src/engine/ts_engine.js +1468 -52
  19. package/dist/src/engine/ts_engine.js.map +1 -1
  20. package/dist/src/engine/types.d.ts +29 -0
  21. package/dist/src/engine/types.d.ts.map +1 -1
  22. package/dist/src/graph/ontology.d.ts +125 -0
  23. package/dist/src/graph/ontology.d.ts.map +1 -0
  24. package/dist/src/graph/ontology.js +1237 -0
  25. package/dist/src/graph/ontology.js.map +1 -0
  26. package/dist/src/net/http_post.d.ts +17 -0
  27. package/dist/src/net/http_post.d.ts.map +1 -0
  28. package/dist/src/net/http_post.js +56 -0
  29. package/dist/src/net/http_post.js.map +1 -0
  30. package/dist/src/quality/llm_output_validator.d.ts +66 -0
  31. package/dist/src/quality/llm_output_validator.d.ts.map +1 -0
  32. package/dist/src/quality/llm_output_validator.js +659 -0
  33. package/dist/src/quality/llm_output_validator.js.map +1 -0
  34. package/dist/src/reflect/reflector.d.ts +7 -0
  35. package/dist/src/reflect/reflector.d.ts.map +1 -1
  36. package/dist/src/reflect/reflector.js +352 -8
  37. package/dist/src/reflect/reflector.js.map +1 -1
  38. package/dist/src/rules/rule_store.d.ts.map +1 -1
  39. package/dist/src/rules/rule_store.js +75 -16
  40. package/dist/src/rules/rule_store.js.map +1 -1
  41. package/dist/src/session/session_end.d.ts +33 -0
  42. package/dist/src/session/session_end.d.ts.map +1 -1
  43. package/dist/src/session/session_end.js +67 -64
  44. package/dist/src/session/session_end.js.map +1 -1
  45. package/dist/src/store/archive_store.d.ts +136 -0
  46. package/dist/src/store/archive_store.d.ts.map +1 -0
  47. package/dist/src/store/archive_store.js +635 -0
  48. package/dist/src/store/archive_store.js.map +1 -0
  49. package/dist/src/store/embedding_utils.d.ts +32 -0
  50. package/dist/src/store/embedding_utils.d.ts.map +1 -0
  51. package/dist/src/store/embedding_utils.js +173 -0
  52. package/dist/src/store/embedding_utils.js.map +1 -0
  53. package/dist/src/store/graph_memory_store.d.ts +114 -0
  54. package/dist/src/store/graph_memory_store.d.ts.map +1 -0
  55. package/dist/src/store/graph_memory_store.js +841 -0
  56. package/dist/src/store/graph_memory_store.js.map +1 -0
  57. package/dist/src/store/read_store.d.ts +89 -0
  58. package/dist/src/store/read_store.d.ts.map +1 -1
  59. package/dist/src/store/read_store.js +2459 -28
  60. package/dist/src/store/read_store.js.map +1 -1
  61. package/dist/src/store/vector_store.d.ts +45 -0
  62. package/dist/src/store/vector_store.d.ts.map +1 -0
  63. package/dist/src/store/vector_store.js +202 -0
  64. package/dist/src/store/vector_store.js.map +1 -0
  65. package/dist/src/store/write_store.d.ts +54 -0
  66. package/dist/src/store/write_store.d.ts.map +1 -1
  67. package/dist/src/store/write_store.js +284 -6
  68. package/dist/src/store/write_store.js.map +1 -1
  69. package/dist/src/sync/session_sync.d.ts +119 -2
  70. package/dist/src/sync/session_sync.d.ts.map +1 -1
  71. package/dist/src/sync/session_sync.js +2377 -31
  72. package/dist/src/sync/session_sync.js.map +1 -1
  73. package/dist/src/utils/runtime_env.d.ts +4 -0
  74. package/dist/src/utils/runtime_env.d.ts.map +1 -0
  75. package/dist/src/utils/runtime_env.js +51 -0
  76. package/dist/src/utils/runtime_env.js.map +1 -0
  77. package/dist/src/wiki/wiki_linter.d.ts +25 -0
  78. package/dist/src/wiki/wiki_linter.d.ts.map +1 -0
  79. package/dist/src/wiki/wiki_linter.js +268 -0
  80. package/dist/src/wiki/wiki_linter.js.map +1 -0
  81. package/dist/src/wiki/wiki_logger.d.ts +10 -0
  82. package/dist/src/wiki/wiki_logger.d.ts.map +1 -0
  83. package/dist/src/wiki/wiki_logger.js +78 -0
  84. package/dist/src/wiki/wiki_logger.js.map +1 -0
  85. package/dist/src/wiki/wiki_maintainer.d.ts +36 -0
  86. package/dist/src/wiki/wiki_maintainer.d.ts.map +1 -0
  87. package/dist/src/wiki/wiki_maintainer.js +38 -0
  88. package/dist/src/wiki/wiki_maintainer.js.map +1 -0
  89. package/dist/src/wiki/wiki_projector.d.ts +33 -0
  90. package/dist/src/wiki/wiki_projector.d.ts.map +1 -0
  91. package/dist/src/wiki/wiki_projector.js +633 -0
  92. package/dist/src/wiki/wiki_projector.js.map +1 -0
  93. package/dist/src/wiki/wiki_queue.d.ts +29 -0
  94. package/dist/src/wiki/wiki_queue.d.ts.map +1 -0
  95. package/dist/src/wiki/wiki_queue.js +137 -0
  96. package/dist/src/wiki/wiki_queue.js.map +1 -0
  97. package/openclaw.plugin.json +501 -16
  98. package/package.json +58 -7
  99. package/schema/graph.schema.yaml +330 -0
  100. package/scripts/cli.js +19 -14
  101. package/scripts/repair-memory.js +321 -0
  102. package/scripts/uninstall.js +22 -5
  103. package/skills/cortex-memory/SKILL.md +49 -0
  104. package/skills/cortex-memory/references/agent-manual.md +115 -0
  105. package/skills/cortex-memory/references/configuration.md +92 -0
  106. package/skills/cortex-memory/references/publish-checklist.md +46 -0
  107. package/skills/cortex-memory/references/system-prompt-template.md +27 -0
  108. package/skills/cortex-memory/references/tools.md +181 -0
  109. package/skills/cortex-memory/scripts/smoke-check.ps1 +56 -0
  110. package/index.ts +0 -2142
@@ -0,0 +1,635 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || (function () {
19
+ var ownKeys = function(o) {
20
+ ownKeys = Object.getOwnPropertyNames || function (o) {
21
+ var ar = [];
22
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
+ return ar;
24
+ };
25
+ return ownKeys(o);
26
+ };
27
+ return function (mod) {
28
+ if (mod && mod.__esModule) return mod;
29
+ var result = {};
30
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
+ __setModuleDefault(result, mod);
32
+ return result;
33
+ };
34
+ })();
35
+ Object.defineProperty(exports, "__esModule", { value: true });
36
+ exports.createArchiveStore = createArchiveStore;
37
+ const fs = __importStar(require("fs"));
38
+ const path = __importStar(require("path"));
39
+ const http_post_1 = require("../net/http_post");
40
+ const ontology_1 = require("../graph/ontology");
41
+ const llm_output_validator_1 = require("../quality/llm_output_validator");
42
/**
 * Normalizes a base URL by removing a single trailing slash.
 *
 * @param {string | undefined | null} value - Raw configured base URL.
 * @returns {string} The URL without its trailing "/", or "" for falsy input.
 */
function normalizeBaseUrl(value) {
    if (!value) {
        return "";
    }
    if (value.endsWith("/")) {
        return value.slice(0, -1);
    }
    return value;
}
47
/**
 * Resolves the max character budget for archived source text.
 *
 * @param {unknown} value - Configured limit (may be absent or invalid).
 * @returns {number} floor(value) clamped to at least 1000, or the 500000
 *   default when value is not a finite number.
 */
function resolveArchiveSourceCharLimit(value) {
    const usable = typeof value === "number" && Number.isFinite(value);
    if (!usable) {
        return 500000;
    }
    return Math.max(1000, Math.floor(value));
}
53
/**
 * Trims text and, when it exceeds maxChars, keeps only the trailing portion
 * (the most recent content), re-trimmed.
 *
 * @param {string | undefined | null} text - Input text.
 * @param {number} maxChars - Character budget; non-finite or <= 0 disables clamping.
 * @returns {string} Trimmed (and possibly tail-clamped) text, "" when empty.
 */
function clampTailText(text, maxChars) {
    const body = (text || "").trim();
    if (body.length === 0) {
        return "";
    }
    const limitApplies = Number.isFinite(maxChars) && maxChars > 0 && body.length > maxChars;
    if (!limitApplies) {
        return body;
    }
    return body.slice(-Math.floor(maxChars)).trim();
}
62
// Lines consisting only of a short acknowledgement carry no archival value.
const ARCHIVE_LOW_INFORMATION_LINE = /^(ok|okay|got it|roger|noted|sure|thanks|thank you|received|copy that|understood)\b/i;
/**
 * Filters acknowledgement chatter and repeated low-signal lines out of
 * archive source text. Lines containing URLs, domain-like tokens, code/path
 * punctuation, @-handles, or multi-digit numbers are always kept. When the
 * filter would remove everything, the original trimmed text is returned.
 *
 * @param {string | undefined | null} text - Raw transcript text.
 * @returns {string} Denoised text joined with "\n", or "" for empty input.
 */
function denoiseArchiveSourceText(text) {
    const source = (text || "").trim();
    if (!source) {
        return "";
    }
    const kept = [];
    const seenKeys = new Set();
    for (const rawLine of source.split(/\r?\n/)) {
        const line = rawLine.trim();
        if (!line) {
            continue;
        }
        // Strip a leading "[speaker]"-style tag before judging the content.
        const payload = line.replace(/^\[[^\]]+\]\s*/, "").trim();
        if (!payload) {
            continue;
        }
        const hasSignal = /(https?:\/\/|www\.|[A-Za-z0-9._-]+\.[A-Za-z]{2,}|[`#/:\\]|@\w+|\b\d{2,}\b)/.test(payload);
        if (!hasSignal && ARCHIVE_LOW_INFORMATION_LINE.test(payload)) {
            continue;
        }
        const key = payload.toLowerCase();
        // Signal-free lines are deduplicated case-insensitively.
        if (!hasSignal && seenKeys.has(key)) {
            continue;
        }
        seenKeys.add(key);
        kept.push(line);
    }
    return kept.length > 0 ? kept.join("\n") : source;
}
89
// Heuristic signal patterns (Chinese + English, case-insensitive) consumed by
// scoreQuality to detect a complete task lifecycle in merged archive text.
// The user asked for something to be done.
const TASK_INSTRUCTION_PATTERNS = [
    /请|帮我|麻烦|需要|任务|需求|实现|修复|排查|优化|上线|部署|整理|编写|启用|查看/i,
    /please|can you|need to|task|implement|fix|investigate|optimi[sz]e|deploy|enable|review/i,
];
// The assistant reported the work as finished.
const COMPLETION_REPORT_PATTERNS = [
    /已完成|完成了|处理完|搞定|已修复|修复了|已实现|已上线|已部署|结果|汇报|完成情况|报告/i,
    /done|completed|fixed|implemented|deployed|resolved|report|summary|finished/i,
];
// The user acknowledged / accepted the result.
const USER_ACCEPTANCE_PATTERNS = [
    /确认|认可|通过|验收|OK|可以|好的|收到|辛苦|谢谢|没问题|就这样/i,
    /approved|accepted|looks good|great|works|thank you|confirmed/i,
];
// Concrete actions were taken (decide / deploy / verify / ...).
const ACTION_PATTERNS = [
    /决定|完成|修复|发布|上线|部署|提交|交付|验证|关闭|推进|落地|实施|启用/i,
    /decide|complete|fix|release|deploy|ship|deliver|verify|close|implement|enable|migrate/i,
];
// Something went wrong along the way.
const FAILURE_PATTERNS = [
    /失败|报错|错误|异常|阻塞|卡住|不行|超时|回滚|故障/i,
    /failed|error|exception|blocked|timeout|rollback|incident/i,
];
// The work eventually succeeded or recovered.
const SUCCESS_PATTERNS = [
    /成功|完成|修复|解决|通过|已上线|稳定|正常|恢复/i,
    /success|completed|fixed|resolved|passed|stable|recovered|works/i,
];
113
/**
 * Returns true when at least one regex in `patterns` matches `text`.
 *
 * @param {string} text - Text to scan.
 * @param {RegExp[]} patterns - Candidate patterns.
 * @returns {boolean} true on the first match; false for an empty list.
 */
function matchesAnyPattern(text, patterns) {
    for (const pattern of patterns) {
        if (pattern.test(text)) {
            return true;
        }
    }
    return false;
}
116
/**
 * Finds the earliest index in `text` where any of `patterns` matches.
 *
 * @param {string} text - Text to scan.
 * @param {RegExp[]} patterns - Candidate patterns.
 * @returns {number} Smallest match index across all patterns, or -1 when
 *   nothing matches (or the list is empty).
 */
function firstMatchIndex(text, patterns) {
    let earliest = -1;
    for (const pattern of patterns) {
        const position = text.search(pattern);
        if (position >= 0 && (earliest < 0 || position < earliest)) {
            earliest = position;
        }
    }
    return earliest;
}
129
/**
 * Scores how "complete" an archive candidate looks using lifecycle heuristics.
 *
 * Signals are detected over the merged free text (summary + cause + process +
 * result + outcome + source text) via the module-level pattern tables. The
 * additive score is clamped to [0, 1] and rounded to 2 decimals, then mapped
 * to a level: >= 0.75 "high", >= 0.4 "medium", otherwise "low".
 *
 * Fix over the previous version: the `signals` object and the result literal
 * were duplicated verbatim across three return branches; they are now built
 * once and the level derived from thresholds. Scoring order and values are
 * unchanged.
 *
 * @param {{summary?: string, cause?: string, process?: string, result?: string,
 *          outcome?: string, sourceText?: string}} args - Candidate fields.
 * @returns {{score: number, level: "high"|"medium"|"low", signals: object}}
 */
function scoreQuality(args) {
    const summary = (args.summary || "").trim();
    const cause = (args.cause || "").trim();
    const process = (args.process || "").trim();
    const result = (args.result || "").trim();
    const outcome = (args.outcome || "").trim();
    const sourceText = (args.sourceText || "").trim();
    const mergedText = [summary, cause, process, result, outcome, sourceText].filter(Boolean).join("\n");
    // A fully structured record carries cause, process, and result.
    const hasStructuredTriplet = cause.length > 0 && process.length > 0 && result.length > 0;
    const hasTaskInstruction = matchesAnyPattern(mergedText, TASK_INSTRUCTION_PATTERNS);
    const hasCompletionReport = matchesAnyPattern(mergedText, COMPLETION_REPORT_PATTERNS);
    const hasUserAcceptance = matchesAnyPattern(mergedText, USER_ACCEPTANCE_PATTERNS);
    const hasAction = matchesAnyPattern(mergedText, ACTION_PATTERNS);
    const hasFailure = matchesAnyPattern(mergedText, FAILURE_PATTERNS);
    const hasSuccess = matchesAnyPattern(mergedText, SUCCESS_PATTERNS);
    const hasOutcome = outcome.length >= 6 || hasSuccess;
    // "Failure then success" means the narrative recovers: the first failure
    // mention appears before the first success mention.
    const firstFailureIdx = hasFailure ? firstMatchIndex(mergedText, FAILURE_PATTERNS) : -1;
    const firstSuccessIdx = hasSuccess ? firstMatchIndex(mergedText, SUCCESS_PATTERNS) : -1;
    const failThenSuccess = hasFailure && hasSuccess && firstFailureIdx >= 0 && firstSuccessIdx > firstFailureIdx;
    // Complete lifecycle: structured triplet, or ask -> report -> acceptance.
    const workflowComplete = hasStructuredTriplet || (hasTaskInstruction && hasCompletionReport && hasUserAcceptance);
    // Additive scoring; order preserved from the original implementation so
    // floating-point accumulation matches exactly.
    let score = 0;
    if (summary.length >= 24)
        score += 0.1;
    if (summary.length >= 60)
        score += 0.1;
    if (summary.length >= 120)
        score += 0.06;
    if (summary.length >= 180)
        score += 0.04;
    if (hasStructuredTriplet)
        score += 0.22;
    if (hasAction)
        score += 0.14;
    if (hasOutcome)
        score += 0.12;
    if (hasTaskInstruction)
        score += 0.12;
    if (hasCompletionReport)
        score += 0.12;
    if (hasUserAcceptance)
        score += 0.14;
    if (workflowComplete)
        score += 0.12;
    if (failThenSuccess)
        score += 0.1;
    const normalizedScore = Math.max(0, Math.min(1, Number(score.toFixed(2))));
    const signals = { hasStructuredTriplet, hasTaskInstruction, hasCompletionReport, hasUserAcceptance, workflowComplete, failThenSuccess };
    const level = normalizedScore >= 0.75 ? "high" : normalizedScore >= 0.4 ? "medium" : "low";
    return { score: normalizedScore, level, signals };
}
195
/**
 * Requests an embedding vector from an OpenAI-compatible /embeddings endpoint
 * via the project's postJsonWithTimeout helper, retrying with exponential
 * backoff (300ms * 2^attempt) on HTTP, network, and response-parse failures.
 *
 * BUG FIX: the previous implementation `continue`d on `!response.ok` BEFORE
 * reaching the backoff sleep at the bottom of the loop, so HTTP failures were
 * retried immediately with no delay. The delay now runs before every retry,
 * whatever the failure mode, with an unchanged schedule.
 *
 * @param {{text: string, model: string, apiKey: string, baseUrl: string,
 *          dimensions?: number, timeoutMs?: number, maxRetries?: number}} args
 * @returns {Promise<number[] | null>} Finite embedding components, or null
 *   (unreachable in practice: all failure paths set lastError and throw).
 * @throws The last transport/parse error when every attempt fails.
 */
async function requestEmbedding(args) {
    const endpoint = args.baseUrl.endsWith("/embeddings") ? args.baseUrl : `${args.baseUrl}/embeddings`;
    const body = {
        input: args.text,
        model: args.model,
    };
    if (typeof args.dimensions === "number" && Number.isFinite(args.dimensions) && args.dimensions > 0) {
        body.dimensions = args.dimensions;
    }
    // Timeout: honor configured value >= 1s, else 20s default.
    const timeoutMs = typeof args.timeoutMs === "number" && Number.isFinite(args.timeoutMs) && args.timeoutMs >= 1000
        ? Math.floor(args.timeoutMs)
        : 20000;
    // Retries: 1..8 when configured, else 4 attempts total.
    const maxRetries = typeof args.maxRetries === "number" && Number.isFinite(args.maxRetries) && args.maxRetries >= 1
        ? Math.min(8, Math.floor(args.maxRetries))
        : 4;
    let lastError = null;
    for (let attempt = 0; attempt < maxRetries; attempt += 1) {
        // Backoff before each retry (never before the first attempt). The
        // schedule matches the original: 300ms, 600ms, 1200ms, ...
        if (attempt > 0) {
            await new Promise(resolve => setTimeout(resolve, 300 * Math.pow(2, attempt - 1)));
        }
        const response = await (0, http_post_1.postJsonWithTimeout)({
            endpoint,
            apiKey: args.apiKey,
            body,
            timeoutMs,
        });
        if (!response.ok) {
            lastError = new Error(response.status > 0 ? `embedding_http_${response.status}` : (response.error || "embedding_network_error"));
            continue;
        }
        try {
            const json = (response.json || {});
            const embedding = json?.data?.[0]?.embedding;
            if (Array.isArray(embedding) && embedding.length > 0) {
                // Drop any non-finite components before returning.
                return embedding.filter(item => Number.isFinite(item));
            }
            lastError = new Error("embedding_empty");
        }
        catch (error) {
            lastError = error;
        }
    }
    if (lastError) {
        throw lastError;
    }
    return null;
}
242
/**
 * Ensures the parent directory of `filePath` exists before a write.
 *
 * Uses recursive mkdir unconditionally: `fs.mkdirSync(dir, {recursive: true})`
 * is a no-op when the directory already exists, which also removes the
 * check-then-create (TOCTOU) race the previous `existsSync` guard had.
 *
 * @param {string} filePath - File whose containing directory must exist.
 */
function ensureDirForFile(filePath) {
    fs.mkdirSync(path.dirname(filePath), { recursive: true });
}
248
/**
 * Rough token estimate: counts non-empty segments after splitting on
 * whitespace and common ASCII/fullwidth punctuation.
 *
 * @param {string} text - Text to measure.
 * @returns {number} Number of non-empty segments (0 for empty input).
 */
function estimateTokenCount(text) {
    const segments = text.split(/[\s,.;:!?,。;:!?、()()[\]{}"'`~]+/);
    let count = 0;
    for (const segment of segments) {
        if (segment.trim()) {
            count += 1;
        }
    }
    return count;
}
255
/**
 * Infers which write path produced an event by inspecting its source_file
 * and actor fields (case-insensitively). "session_end" wins over "sync";
 * anything else is treated as a manual write.
 *
 * @param {{source_file?: string, actor?: string}} event - Incoming event.
 * @returns {"session_end" | "sync" | "manual"} Inferred gate source.
 */
function inferGateSource(event) {
    const sourceFile = (event.source_file || "").toLowerCase();
    const actor = (event.actor || "").toLowerCase();
    const mentions = (needle) => sourceFile.includes(needle) || actor.includes(needle);
    if (mentions("session_end")) {
        return "session_end";
    }
    if (mentions("sync")) {
        return "sync";
    }
    return "manual";
}
266
/**
 * Splits text into overlapping chunks of roughly `chunkSize` characters,
 * preferring to break just after sentence-ending punctuation.
 *
 * Size defaults to 600 (minimum 200 to be honored), overlap defaults to 100
 * and is capped at size-50 so the cursor always advances. For each chunk the
 * function first searches backward from the size boundary (down to 45% of the
 * chunk) for a punctuation break, then forward (up to 20% past the boundary),
 * and falls back to a hard cut at the size limit.
 *
 * @param {string} text - Text to split.
 * @param {number} chunkSize - Target chunk size in characters.
 * @param {number} chunkOverlap - Characters repeated between adjacent chunks.
 * @returns {{index: number, start: number, end: number, text: string}[]}
 *   Ordered chunks; `start`/`end` are offsets into the original text and
 *   `text` is the trimmed slice (whitespace-only slices are dropped).
 */
function splitTextChunks(text, chunkSize, chunkOverlap) {
    const normalizedSize = Number.isFinite(chunkSize) && chunkSize >= 200 ? Math.floor(chunkSize) : 600;
    const normalizedOverlap = Number.isFinite(chunkOverlap) && chunkOverlap >= 0
        ? Math.floor(chunkOverlap)
        : 100;
    // Cap overlap below the chunk size so each iteration makes progress.
    const overlap = Math.min(normalizedOverlap, Math.max(0, normalizedSize - 50));
    const output = [];
    let cursor = 0;
    let index = 0;
    // Sentence-ending punctuation (fullwidth and ASCII) plus newline.
    const punctuationSet = new Set(["。", "!", "?", ".", "!", "?", "\n", ";", ";"]);
    while (cursor < text.length) {
        const rawEnd = Math.min(text.length, cursor + normalizedSize);
        let end = rawEnd;
        if (rawEnd < text.length) {
            // Search backward from the boundary for a punctuation break, but
            // never shrink the chunk below ~45% of the target size.
            const backwardStart = Math.max(cursor + Math.floor(normalizedSize * 0.45), cursor + 1);
            let found = -1;
            for (let i = rawEnd - 1; i >= backwardStart; i -= 1) {
                if (punctuationSet.has(text[i])) {
                    found = i + 1;
                    break;
                }
            }
            if (found < 0) {
                // No break behind the boundary: look a short distance ahead
                // (up to 20% of the chunk size) instead.
                const forwardEnd = Math.min(text.length, rawEnd + Math.floor(normalizedSize * 0.2));
                for (let i = rawEnd; i < forwardEnd; i += 1) {
                    if (punctuationSet.has(text[i])) {
                        found = i + 1;
                        break;
                    }
                }
            }
            if (found > cursor) {
                end = found;
            }
        }
        // Safety net: guarantee forward progress with a hard cut.
        if (end <= cursor) {
            end = Math.min(text.length, cursor + normalizedSize);
        }
        const chunkText = text.slice(cursor, end).trim();
        if (chunkText) {
            output.push({ index, start: cursor, end, text: chunkText });
            index += 1;
        }
        if (end >= text.length) {
            break;
        }
        // Step back by `overlap` for context continuity, but never move the
        // cursor backward or keep it in place.
        const nextCursor = Math.max(cursor + 1, end - overlap);
        cursor = nextCursor <= cursor ? end : nextCursor;
    }
    return output;
}
317
/**
 * Selects up to `maxCount` representative chunks: always the first, then the
 * middle and last when the budget allows, then earliest remaining chunks to
 * fill any leftover slots. Output is sorted by chunk index.
 *
 * @param {{index: number}[]} chunks - Candidate chunks (assumed index-ordered).
 * @param {number} maxCount - Maximum number of chunks to return.
 * @returns {object[]} Selected chunks; the original array when it already
 *   fits the budget, [] when the input is empty or the budget is <= 0.
 */
function pickEvidenceChunks(chunks, maxCount) {
    if (chunks.length === 0 || maxCount <= 0) {
        return [];
    }
    if (chunks.length <= maxCount) {
        return chunks;
    }
    const selected = new Map();
    const take = (chunk) => {
        selected.set(chunk.index, chunk);
    };
    take(chunks[0]);
    if (maxCount >= 2) {
        take(chunks[Math.floor(chunks.length / 2)]);
    }
    if (maxCount >= 3) {
        take(chunks[chunks.length - 1]);
    }
    // Fill remaining slots with the earliest chunks not yet chosen.
    for (const chunk of chunks) {
        if (selected.size >= maxCount) {
            break;
        }
        if (!selected.has(chunk.index)) {
            take(chunk);
        }
    }
    return [...selected.values()].sort((a, b) => a.index - b.index).slice(0, maxCount);
}
343
/**
 * Maps `items` through an async `mapper` with at most `maxConcurrency`
 * mappers in flight, preserving result order. Workers pull the next index
 * from a shared counter; the read-then-increment is synchronous (no await in
 * between), so indices are claimed exactly once.
 *
 * @param {Array} items - Inputs to map.
 * @param {number} maxConcurrency - Upper bound on parallel mapper calls.
 * @param {(item: any, index: number) => Promise<any>} mapper - Async transform.
 * @returns {Promise<Array>} Results aligned with the input indices.
 */
async function mapWithConcurrency(items, maxConcurrency, mapper) {
    if (items.length === 0) {
        return [];
    }
    const workerCount = Math.max(1, Math.min(maxConcurrency, items.length));
    const results = new Array(items.length);
    let nextIndex = 0;
    const runWorker = async () => {
        for (;;) {
            const claimed = nextIndex;
            nextIndex += 1;
            if (claimed >= items.length) {
                return;
            }
            results[claimed] = await mapper(items[claimed], claimed);
        }
    };
    const workers = [];
    for (let i = 0; i < workerCount; i += 1) {
        workers.push(runWorker());
    }
    await Promise.all(workers);
    return results;
}
363
/**
 * Creates the archive-layer store: filters, deduplicates, embeds, and appends
 * candidate events to the sessions.jsonl archive plus a mutation log, and
 * upserts per-chunk embeddings into the injected vector store.
 *
 * @param {object} options - Injected services and configuration: memoryRoot,
 *   projectRoot, logger, deduplicator, vectorStore, and optional writePolicy,
 *   embedding, and vectorChunking settings (shapes defined by the project's
 *   TypeScript sources — not visible here).
 * @returns {{storeEvents: Function}} The store API.
 */
function createArchiveStore(options) {
    const archivePath = path.join(options.memoryRoot, "sessions", "archive", "sessions.jsonl");
    const mutationLogPath = path.join(options.memoryRoot, "sessions", "archive", "mutation_log.jsonl");
    const graphSchema = (0, ontology_1.loadGraphSchema)(options.projectRoot);
    const archiveSourceTextMaxChars = resolveArchiveSourceCharLimit(options.writePolicy?.archiveSourceTextMaxChars);
    /**
     * Runs each event through the gating pipeline (summary present -> sync
     * triplet gate -> confidence gate -> quality gate -> dedup), embeds the
     * survivors, and appends them to the archive files in one batch write.
     *
     * @param {object[]} events - Candidate archive events.
     * @returns {Promise<{stored: object[], skipped: {summary: string, reason: string}[]}>}
     */
    async function storeEvents(events) {
        const stored = [];
        const skipped = [];
        if (!events.length) {
            return { stored, skipped };
        }
        const lines = [];
        const mutationLines = [];
        for (const event of events) {
            // Gate 1: an event without a summary is never archived.
            const summary = (event.summary || "").trim();
            if (!summary) {
                skipped.push({ summary: "", reason: "empty_summary" });
                options.logger.info("archive_skip reason=empty_summary");
                continue;
            }
            const cause = (event.cause || "").trim();
            const process = (event.process || "").trim();
            const result = (event.result || event.outcome || "").trim();
            // Clamp any provided confidence into [0, 1].
            const confidence = typeof event.confidence === "number"
                ? Math.max(0, Math.min(1, event.confidence))
                : undefined;
            const quality = scoreQuality({
                summary,
                cause,
                process,
                result,
                outcome: event.outcome,
                sourceText: event.source_text,
            });
            const gateSource = inferGateSource(event);
            // A complete workflow can override the confidence/quality gates below.
            const lifecycleComplete = quality.signals.workflowComplete;
            // Gate 2: sync-originated events must carry the full
            // cause/process/result triplet.
            if (gateSource === "sync" && !quality.signals.hasStructuredTriplet) {
                skipped.push({ summary, reason: "incomplete_cause_process_result" });
                options.logger.info("archive_skip reason=incomplete_cause_process_result gate_source=sync");
                continue;
            }
            // Gate 3: confidence threshold (default 0.35), bypassed when the
            // lifecycle is complete.
            const archiveMinConfidence = typeof options.writePolicy?.archiveMinConfidence === "number"
                ? Math.max(0, Math.min(1, options.writePolicy.archiveMinConfidence))
                : 0.35;
            if (typeof confidence === "number" && confidence < archiveMinConfidence) {
                if (!lifecycleComplete) {
                    skipped.push({ summary, reason: "low_confidence" });
                    options.logger.info("archive_skip reason=filtered_low_quality detail=low_confidence");
                    continue;
                }
                options.logger.info(`archive_confidence_override reason=workflow_complete confidence=${confidence.toFixed(2)} threshold=${archiveMinConfidence.toFixed(2)}`);
            }
            // Gate 4: quality-score threshold (default 0.4), same override rule.
            const archiveMinQualityScore = typeof options.writePolicy?.archiveMinQualityScore === "number"
                ? Math.max(0, Math.min(1, options.writePolicy.archiveMinQualityScore))
                : 0.4;
            if (quality.score < archiveMinQualityScore) {
                if (!lifecycleComplete) {
                    skipped.push({ summary, reason: "low_quality" });
                    options.logger.info("archive_skip reason=filtered_low_quality detail=low_quality");
                    continue;
                }
                options.logger.info(`archive_quality_override reason=workflow_complete quality=${quality.score.toFixed(2)} threshold=${archiveMinQualityScore.toFixed(2)}`);
            }
            const normalizedEventType = (0, ontology_1.normalizeEventType)(event.event_type || "insight", graphSchema);
            // Time-plus-randomness id; not cryptographically unique.
            const id = `evt_${Date.now().toString(36)}_${Math.random().toString(36).slice(2, 8)}`;
            const sourceTextRaw = typeof event.source_text === "string" ? event.source_text : "";
            // Denoise chat filler, then keep only the most recent characters.
            const sourceText = clampTailText(denoiseArchiveSourceText(sourceTextRaw), archiveSourceTextMaxChars);
            const dedupText = [normalizedEventType, summary, sourceText].filter(Boolean).join("\n");
            // Gate 5: deduplication via the injected deduplicator.
            const dedup = options.deduplicator.check({
                id,
                summary: dedupText || `${normalizedEventType}: ${summary}`,
            });
            if (dedup.duplicate) {
                skipped.push({ summary, reason: `duplicate_${dedup.stage || "unknown"}` });
                options.logger.info(`archive_skip reason=duplicate_dedup_stage_${dedup.stage || "unknown"}`);
                continue;
            }
            // The archive record written as one JSONL line.
            const record = {
                id,
                timestamp: new Date().toISOString(),
                layer: "archive",
                event_type: normalizedEventType,
                summary,
                cause,
                process,
                result,
                source_text: sourceText || undefined,
                outcome: event.outcome,
                session_id: event.session_id,
                source_file: event.source_file,
                gate_source: gateSource,
                embedding_status: "pending",
                quality_score: quality.score,
                quality_level: quality.level,
                char_count: (sourceText || summary).length,
                token_count: estimateTokenCount(sourceText || summary),
                vector_chunks_total: 0,
                vector_chunks_ok: 0,
                confidence,
                source_event_id: event.source_event_id,
                actor: event.actor || "system",
                canonical_id: event.canonical_id || (0, ontology_1.buildCanonicalId)({
                    eventType: normalizedEventType,
                    summary,
                    outcome: event.outcome,
                }),
            };
            let embedding = undefined;
            const vectorUpsertRows = [];
            const embeddingModel = options.embedding?.model || "";
            const embeddingApiKey = options.embedding?.apiKey || "";
            const embeddingBaseUrl = normalizeBaseUrl(options.embedding?.baseURL || options.embedding?.baseUrl);
            const maxParallel = 6;
            // Embedding is best-effort: it runs only with full embedding config,
            // and failures mark the record instead of blocking the archive write.
            if (embeddingModel && embeddingApiKey && embeddingBaseUrl) {
                const chunkSize = options.vectorChunking?.chunkSize ?? 600;
                const chunkOverlap = options.vectorChunking?.chunkOverlap ?? 100;
                const evidenceMaxChunks = typeof options.vectorChunking?.evidenceMaxChunks === "number"
                    ? Math.max(0, Math.min(8, Math.floor(options.vectorChunking.evidenceMaxChunks)))
                    : 2;
                const summaryText = (record.summary || "").trim();
                // Evidence chunks come from the denoised source text.
                const evidenceChunks = record.source_text
                    ? pickEvidenceChunks(splitTextChunks(record.source_text, chunkSize, chunkOverlap), evidenceMaxChunks)
                    : [];
                // The summary itself is embedded as chunk 0 when present.
                const summaryChunk = summaryText
                    ? [
                        {
                            text: summaryText,
                            source_field: "summary",
                            index: 0,
                            total: 1 + evidenceChunks.length,
                            start: 0,
                            end: summaryText.length,
                        },
                    ]
                    : [];
                const embeddingInputs = [
                    ...summaryChunk,
                    ...evidenceChunks.map((chunk, idx) => ({
                        text: chunk.text,
                        source_field: "evidence",
                        index: idx + summaryChunk.length,
                        total: summaryChunk.length + evidenceChunks.length,
                        start: chunk.start,
                        end: chunk.end,
                    })),
                ];
                record.vector_chunks_total = embeddingInputs.length;
                // Embed chunks in parallel; a failed chunk becomes null.
                const chunkEmbeddings = await mapWithConcurrency(embeddingInputs, maxParallel, async (chunk) => {
                    try {
                        const chunkEmbedding = await requestEmbedding({
                            text: chunk.text,
                            model: embeddingModel,
                            apiKey: embeddingApiKey,
                            baseUrl: embeddingBaseUrl,
                            dimensions: options.embedding?.dimensions,
                            timeoutMs: options.embedding?.timeoutMs,
                            maxRetries: options.embedding?.maxRetries,
                        }) || undefined;
                        if (chunkEmbedding && chunkEmbedding.length > 0) {
                            return {
                                chunk,
                                embedding: chunkEmbedding,
                            };
                        }
                        return null;
                    }
                    catch (error) {
                        options.logger.warn(`Archive chunk embedding failed id=${id} chunk=${chunk.index} field=${chunk.source_field} error=${error}`);
                        return null;
                    }
                });
                const validEmbeddings = chunkEmbeddings
                    .filter((item) => Boolean(item))
                    .sort((a, b) => a.chunk.index - b.chunk.index);
                // The record-level embedding prefers the summary chunk, else the
                // earliest successful chunk.
                const primary = validEmbeddings.find(item => item.chunk.source_field === "summary");
                if (primary) {
                    embedding = primary.embedding;
                }
                else if (validEmbeddings.length > 0) {
                    embedding = validEmbeddings[0].embedding;
                }
                for (const item of validEmbeddings) {
                    vectorUpsertRows.push({
                        id: `${id}_c${item.chunk.index}`,
                        summary: item.chunk.text,
                        embedding: item.embedding,
                        source_field: item.chunk.source_field,
                        chunk_index: item.chunk.index,
                        chunk_total: item.chunk.total,
                        chunk_start: item.chunk.start,
                        chunk_end: item.chunk.end,
                    });
                }
                record.vector_chunks_ok = validEmbeddings.length;
                // "ok" only when every planned chunk embedded successfully.
                record.embedding_status = record.vector_chunks_total > 0 && record.vector_chunks_ok === record.vector_chunks_total
                    ? "ok"
                    : "failed";
            }
            record.embedding = embedding;
            lines.push(JSON.stringify(record));
            stored.push(record);
            // Register with the deduplicator so later events in this (and future)
            // batches dedupe against this record.
            options.deduplicator.append({
                id: record.id,
                summary: dedupText || `${record.event_type}: ${summary}`,
                embedding: embedding,
            });
            mutationLines.push(JSON.stringify({
                op: "insert_event",
                id: record.id,
                canonical_id: record.canonical_id,
                source_event_id: record.source_event_id || "",
                actor: record.actor || "system",
                timestamp: record.timestamp,
                event_type: record.event_type,
                summary: record.summary,
            }));
            options.logger.info(`archive_write reason=archived_success gate_source=${record.gate_source} id=${record.id}`);
            if (vectorUpsertRows.length > 0) {
                // Replace any previous vectors for this record, then upsert the
                // new chunk rows in parallel; partial failure is logged only.
                await options.vectorStore.deleteBySourceMemory({ layer: "archive", sourceMemoryId: record.id });
                const upsertResults = await mapWithConcurrency(vectorUpsertRows, maxParallel, async (chunkRow) => {
                    try {
                        await options.vectorStore.upsert({
                            id: chunkRow.id,
                            session_id: record.session_id,
                            event_type: record.event_type,
                            summary: chunkRow.summary,
                            timestamp: record.timestamp,
                            outcome: record.outcome,
                            embedding: chunkRow.embedding,
                            quality_score: record.quality_score,
                            layer: "archive",
                            source_memory_id: record.id,
                            source_memory_canonical_id: record.canonical_id,
                            source_event_id: record.source_event_id || record.id,
                            source_field: chunkRow.source_field,
                            char_count: chunkRow.summary.length,
                            token_count: estimateTokenCount(chunkRow.summary),
                            chunk_index: chunkRow.chunk_index,
                            chunk_total: chunkRow.chunk_total,
                            chunk_start: chunkRow.chunk_start,
                            chunk_end: chunkRow.chunk_end,
                        });
                        return true;
                    }
                    catch (error) {
                        options.logger.warn(`Archive chunk upsert failed id=${record.id} chunk=${chunkRow.chunk_index} error=${error}`);
                        return false;
                    }
                });
                const upsertOk = upsertResults.filter(Boolean).length;
                if (upsertOk !== vectorUpsertRows.length) {
                    options.logger.warn(`archive_vector_upsert_partial id=${record.id} ok=${upsertOk}/${vectorUpsertRows.length}`);
                }
            }
        }
        if (lines.length > 0) {
            // Single batched append per file for the whole event batch.
            ensureDirForFile(archivePath);
            fs.appendFileSync(archivePath, `${lines.join("\n")}\n`, "utf-8");
            // Post-write integrity check: validates each line AFTER appending;
            // failures are logged, the written lines are not rolled back.
            for (let i = 0; i < lines.length; i++) {
                const validation = (0, llm_output_validator_1.validateJsonlLine)(lines[i]);
                if (!validation.valid && validation.errors.length > 0) {
                    options.logger.warn(`archive_write_integrity_check_failed line=${i} errors=${validation.errors.join("|")}`);
                }
            }
            ensureDirForFile(mutationLogPath);
            fs.appendFileSync(mutationLogPath, `${mutationLines.join("\n")}\n`, "utf-8");
        }
        return { stored, skipped };
    }
    options.logger.info(`Archive store initialized at ${archivePath}`);
    return { storeEvents };
}
635
+ //# sourceMappingURL=archive_store.js.map