@wipcomputer/memory-crystal 0.7.34-alpha.2 → 0.7.34-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/LICENSE +1 -1
  2. package/dist/bridge.js +64 -7
  3. package/dist/bulk-copy.js +67 -16
  4. package/dist/cc-hook.js +2163 -62
  5. package/dist/cc-poller.js +1967 -70
  6. package/dist/cli.js +4538 -139
  7. package/dist/core.js +1789 -6
  8. package/dist/crypto.js +153 -14
  9. package/dist/crystal-serve.js +64 -12
  10. package/dist/doctor.js +517 -52
  11. package/dist/dream-weaver.js +1755 -7
  12. package/dist/file-sync.js +407 -9
  13. package/dist/installer.js +840 -145
  14. package/dist/ldm.js +231 -16
  15. package/dist/mcp-server.js +1882 -17
  16. package/dist/migrate.js +1707 -11
  17. package/dist/mirror-sync.js +2052 -34
  18. package/dist/openclaw.js +1895 -84
  19. package/dist/pair.js +112 -16
  20. package/dist/poller.js +2275 -80
  21. package/dist/role.js +159 -7
  22. package/dist/staging.js +235 -10
  23. package/dist/summarize.js +142 -5
  24. package/package.json +7 -4
  25. package/dist/chunk-25LXQJ4Z.js +0 -110
  26. package/dist/chunk-2DRXIRQW.js +0 -97
  27. package/dist/chunk-2GBYLMEF.js +0 -1385
  28. package/dist/chunk-2ZNH5F6E.js +0 -1281
  29. package/dist/chunk-3G3SFYYI.js +0 -288
  30. package/dist/chunk-3RG5ZIWI.js +0 -10
  31. package/dist/chunk-3S6TI23B.js +0 -97
  32. package/dist/chunk-3VFIJYS4.js +0 -818
  33. package/dist/chunk-437F27T6.js +0 -97
  34. package/dist/chunk-52QE3YI3.js +0 -1169
  35. package/dist/chunk-57RP3DIN.js +0 -1205
  36. package/dist/chunk-5HSZ4W2P.js +0 -62
  37. package/dist/chunk-5I7GMRDN.js +0 -146
  38. package/dist/chunk-645IPXW3.js +0 -290
  39. package/dist/chunk-7A7ELD4C.js +0 -1205
  40. package/dist/chunk-7FYY4GZM.js +0 -1205
  41. package/dist/chunk-7IUE7ODU.js +0 -254
  42. package/dist/chunk-7RMLKZIS.js +0 -108
  43. package/dist/chunk-AA3OPP4Z.js +0 -432
  44. package/dist/chunk-AEWLSYPH.js +0 -72
  45. package/dist/chunk-ASSZDR6I.js +0 -108
  46. package/dist/chunk-AYRJVWUC.js +0 -1205
  47. package/dist/chunk-CCYI5O3D.js +0 -148
  48. package/dist/chunk-CGIDSAJB.js +0 -288
  49. package/dist/chunk-D3I3ZSE2.js +0 -411
  50. package/dist/chunk-D3MACYZ4.js +0 -108
  51. package/dist/chunk-DACSKLY6.js +0 -219
  52. package/dist/chunk-DFQ72B7M.js +0 -248
  53. package/dist/chunk-DW5B4BL7.js +0 -108
  54. package/dist/chunk-EKSACBTJ.js +0 -1070
  55. package/dist/chunk-EXEZZADG.js +0 -248
  56. package/dist/chunk-F3Y7EL7K.js +0 -83
  57. package/dist/chunk-FBQWSDPC.js +0 -1328
  58. package/dist/chunk-FHRZNOMW.js +0 -1205
  59. package/dist/chunk-IM7N24MT.js +0 -129
  60. package/dist/chunk-IPNYIXFK.js +0 -1178
  61. package/dist/chunk-J7MRSZIO.js +0 -167
  62. package/dist/chunk-JITKI2OI.js +0 -106
  63. package/dist/chunk-JWZXYVET.js +0 -1068
  64. package/dist/chunk-KCQUXVYT.js +0 -108
  65. package/dist/chunk-KOQ43OX6.js +0 -1281
  66. package/dist/chunk-KYVWO6ZM.js +0 -1069
  67. package/dist/chunk-L3VHARQH.js +0 -413
  68. package/dist/chunk-LBWDS6BE.js +0 -288
  69. package/dist/chunk-LOVAHSQV.js +0 -411
  70. package/dist/chunk-LQOYCAGG.js +0 -446
  71. package/dist/chunk-LWAIPJ2W.js +0 -146
  72. package/dist/chunk-M5DHKW7M.js +0 -127
  73. package/dist/chunk-MBKCIJHM.js +0 -1328
  74. package/dist/chunk-MK42FMEG.js +0 -147
  75. package/dist/chunk-MOBMYHKL.js +0 -1205
  76. package/dist/chunk-MPLTNMRG.js +0 -67
  77. package/dist/chunk-NIJCVN3O.js +0 -147
  78. package/dist/chunk-NX647OM3.js +0 -310
  79. package/dist/chunk-NZCFSZQ7.js +0 -1205
  80. package/dist/chunk-O2UITJGH.js +0 -465
  81. package/dist/chunk-OCRA44AZ.js +0 -108
  82. package/dist/chunk-P3KJR66H.js +0 -117
  83. package/dist/chunk-PEK6JH65.js +0 -432
  84. package/dist/chunk-PJ6FFKEX.js +0 -77
  85. package/dist/chunk-PLUBBZYR.js +0 -800
  86. package/dist/chunk-PNKVD2UK.js +0 -26
  87. package/dist/chunk-PSQZURHO.js +0 -229
  88. package/dist/chunk-SGL6ISBJ.js +0 -1061
  89. package/dist/chunk-SJABZZT5.js +0 -97
  90. package/dist/chunk-TD3P3K32.js +0 -1199
  91. package/dist/chunk-TMDZJJKV.js +0 -288
  92. package/dist/chunk-UNHVZB5G.js +0 -411
  93. package/dist/chunk-VAFTWSTE.js +0 -1061
  94. package/dist/chunk-VNFXFQBB.js +0 -217
  95. package/dist/chunk-X3GVFKSJ.js +0 -1205
  96. package/dist/chunk-XZ3S56RQ.js +0 -1061
  97. package/dist/chunk-Y72C7F6O.js +0 -148
  98. package/dist/chunk-YLICP577.js +0 -1205
  99. package/dist/chunk-YX6AXLVK.js +0 -159
  100. package/dist/chunk-ZCQYHTNU.js +0 -146
  101. package/dist/cloud-crystal.js +0 -6
  102. package/dist/dev-update-SZ2Z4WCQ.js +0 -6
  103. package/dist/llm-XXLYPIOF.js +0 -16
  104. package/dist/mlx-setup-XKU67WCT.js +0 -289
  105. package/dist/search-pipeline-4K4OJSSS.js +0 -255
  106. package/dist/search-pipeline-4PRS6LI7.js +0 -280
  107. package/dist/search-pipeline-7UJMXPLO.js +0 -280
  108. package/dist/search-pipeline-CBV25NX7.js +0 -99
  109. package/dist/search-pipeline-DQTRLGBH.js +0 -74
  110. package/dist/search-pipeline-HNG37REH.js +0 -282
  111. package/dist/search-pipeline-IZFPLBUB.js +0 -280
  112. package/dist/search-pipeline-MID6F26Q.js +0 -73
  113. package/dist/search-pipeline-N52JZFNN.js +0 -282
  114. package/dist/search-pipeline-OPB2PRQQ.js +0 -280
  115. package/dist/search-pipeline-VXTE5HAD.js +0 -262
  116. package/dist/search-pipeline-XHFKADRG.js +0 -73
  117. package/dist/worker-demo.js +0 -186
  118. package/dist/worker-mcp.js +0 -404
  119. package/scripts/crystal-capture 2.sh +0 -29
  120. package/scripts/deploy-cloud 2.sh +0 -153
package/dist/openclaw.js CHANGED
@@ -1,38 +1,1849 @@
1
- import {
2
- Crystal,
3
- resolveConfig
4
- } from "./chunk-2GBYLMEF.js";
5
- import {
6
- ensureLdm,
7
- resolveStatePath,
8
- stateWritePath
9
- } from "./chunk-DFQ72B7M.js";
1
+ var __defProp = Object.defineProperty;
2
+ var __getOwnPropNames = Object.getOwnPropertyNames;
3
+ var __esm = (fn, res) => function __init() {
4
+ return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
5
+ };
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
10
 
11
- // src/dev-update.ts
11
+ // src/llm.ts
12
+ import { existsSync, readFileSync } from "fs";
13
+ import { join } from "path";
14
+ import { homedir } from "os";
12
15
  import { execSync } from "child_process";
13
- import { existsSync, mkdirSync, writeFileSync, readFileSync } from "fs";
14
- import { join, basename } from "path";
16
+ function dbCacheGet(key) {
17
+ if (!_cacheDb) return null;
18
+ try {
19
+ const row = _cacheDb.prepare(
20
+ "SELECT result FROM llm_cache WHERE cache_key = ? AND created_at > ?"
21
+ ).get(key, new Date(Date.now() - CACHE_TTL_DAYS * 864e5).toISOString());
22
+ if (row) {
23
+ _cacheDb.prepare("UPDATE llm_cache SET hit_count = hit_count + 1, last_hit_at = ? WHERE cache_key = ?").run((/* @__PURE__ */ new Date()).toISOString(), key);
24
+ return row.result;
25
+ }
26
+ } catch {
27
+ }
28
+ return null;
29
+ }
30
+ function dbCacheSet(key, type, query, intent, result, provider) {
31
+ if (!_cacheDb) return;
32
+ try {
33
+ _cacheDb.prepare(
34
+ "INSERT OR REPLACE INTO llm_cache (cache_key, cache_type, query, intent, result, provider, created_at, hit_count, last_hit_at) VALUES (?, ?, ?, ?, ?, ?, ?, 0, NULL)"
35
+ ).run(key, type, query, intent || null, result, provider, (/* @__PURE__ */ new Date()).toISOString());
36
+ } catch {
37
+ }
38
+ }
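For illustration, the 864e5 literal in dbCacheGet is milliseconds per day, so cached expansion and rerank rows expire after CACHE_TTL_DAYS. A minimal sketch of that cutoff, using the same default of 7 days:

// 864e5 ms = 24 * 60 * 60 * 1000 = one day; rows older than the cutoff are ignored.
const ttlDays = parseInt(process.env.CRYSTAL_CACHE_TTL_DAYS || "7", 10);
const cutoff = new Date(Date.now() - ttlDays * 864e5).toISOString();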
39
+ function getOpSecret(itemName, fieldLabel) {
40
+ try {
41
+ const saTokenPath = join(homedir(), ".openclaw/secrets/op-sa-token");
42
+ if (!existsSync(saTokenPath)) return void 0;
43
+ const saToken = readFileSync(saTokenPath, "utf-8").trim();
44
+ const result = execSync(
45
+ `OP_SERVICE_ACCOUNT_TOKEN="${saToken}" op item get "${itemName}" --vault "Agent Secrets" --fields "${fieldLabel}" --reveal`,
46
+ { encoding: "utf-8", timeout: 5e3, stdio: ["pipe", "pipe", "pipe"] }
47
+ ).trim();
48
+ return result || void 0;
49
+ } catch {
50
+ return void 0;
51
+ }
52
+ }
53
+ async function detectProvider() {
54
+ if (detectionDone && detectedProvider) return detectedProvider;
55
+ detectionDone = true;
56
+ if (samplingServer) {
57
+ detectedProvider = { provider: "sampling", baseURL: "", apiKey: "", model: "client-selected" };
58
+ process.stderr.write("[memory-crystal] LLM provider: MCP Sampling (via client)\n");
59
+ return detectedProvider;
60
+ }
61
+ try {
62
+ const resp = await fetch("http://localhost:18791/v1/models", { signal: AbortSignal.timeout(1e3) });
63
+ if (resp.ok) {
64
+ const data = await resp.json();
65
+ const model = data?.data?.[0]?.id || "default";
66
+ detectedProvider = { provider: "mlx", baseURL: "http://localhost:18791/v1", apiKey: "not-needed", model };
67
+ process.stderr.write(`[memory-crystal] LLM provider: MLX (${model})
68
+ `);
69
+ return detectedProvider;
70
+ }
71
+ } catch {
72
+ }
73
+ try {
74
+ const resp = await fetch("http://localhost:11434/api/tags", { signal: AbortSignal.timeout(1e3) });
75
+ if (resp.ok) {
76
+ const data = await resp.json();
77
+ const models = data?.models || [];
78
+ const embeddingOnly = ["nomic-embed-text", "mxbai-embed", "all-minilm", "snowflake-arctic-embed"];
79
+ const chatModel = models.find((m) => !embeddingOnly.some((e) => m.name.startsWith(e)));
80
+ if (chatModel) {
81
+ detectedProvider = { provider: "ollama", baseURL: "http://localhost:11434/v1", apiKey: "ollama", model: chatModel.name };
82
+ process.stderr.write(`[memory-crystal] LLM provider: Ollama (${chatModel.name})
83
+ `);
84
+ return detectedProvider;
85
+ }
86
+ }
87
+ } catch {
88
+ }
89
+ const openaiKey = process.env.OPENAI_API_KEY || getOpSecret("OpenAI API", "api key");
90
+ if (openaiKey) {
91
+ detectedProvider = { provider: "openai", baseURL: "https://api.openai.com/v1", apiKey: openaiKey, model: "gpt-4o-mini" };
92
+ process.stderr.write("[memory-crystal] LLM provider: OpenAI API\n");
93
+ return detectedProvider;
94
+ }
95
+ const anthropicKey = process.env.ANTHROPIC_API_KEY || getOpSecret("Anthropic Auth Token - remote bunkers", "Auth Token");
96
+ if (anthropicKey && !anthropicKey.startsWith("sk-ant-oat")) {
97
+ detectedProvider = { provider: "anthropic", baseURL: "https://api.anthropic.com", apiKey: anthropicKey, model: "claude-haiku-4-5-20251001" };
98
+ process.stderr.write("[memory-crystal] LLM provider: Anthropic API\n");
99
+ return detectedProvider;
100
+ }
101
+ detectedProvider = { provider: "none", baseURL: "", apiKey: "", model: "" };
102
+ process.stderr.write("[memory-crystal] LLM provider: none (deep search unavailable)\n");
103
+ return detectedProvider;
104
+ }
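detectProvider probes in a fixed order (MCP sampling, then a local MLX server on port 18791, then Ollama on 11434, then OpenAI and Anthropic keys from the environment or 1Password) and memoizes the first hit. A minimal usage sketch, assuming it runs inside an async context:

const llm = await detectProvider();   // sampling -> MLX -> Ollama -> OpenAI -> Anthropic -> none
if (llm.provider === "none") {
  // no LLM available: deep search degrades to the plain hybrid search path
} else {
  const reply = await chatComplete(llm, [{ role: "user", content: "ping" }], 50);
}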
105
+ async function chatComplete(config, messages, maxTokens = 300) {
106
+ if (config.provider === "sampling") {
107
+ return samplingComplete(messages, maxTokens);
108
+ }
109
+ if (config.provider === "anthropic") {
110
+ return anthropicComplete(config, messages, maxTokens);
111
+ }
112
+ const resp = await fetch(`${config.baseURL}/chat/completions`, {
113
+ method: "POST",
114
+ headers: {
115
+ "Content-Type": "application/json",
116
+ "Authorization": `Bearer ${config.apiKey}`
117
+ },
118
+ body: JSON.stringify({
119
+ model: config.model,
120
+ messages,
121
+ max_tokens: maxTokens,
122
+ temperature: 0.7
123
+ })
124
+ });
125
+ if (!resp.ok) throw new Error(`LLM request failed: ${resp.status}`);
126
+ const data = await resp.json();
127
+ return data.choices?.[0]?.message?.content || "";
128
+ }
129
+ async function anthropicComplete(config, messages, maxTokens) {
130
+ const systemMsg = messages.find((m) => m.role === "system");
131
+ const userMessages = messages.filter((m) => m.role !== "system");
132
+ const body = {
133
+ model: config.model,
134
+ max_tokens: maxTokens,
135
+ messages: userMessages
136
+ };
137
+ if (systemMsg) body.system = systemMsg.content;
138
+ const resp = await fetch("https://api.anthropic.com/v1/messages", {
139
+ method: "POST",
140
+ headers: {
141
+ "Content-Type": "application/json",
142
+ "x-api-key": config.apiKey,
143
+ "anthropic-version": "2023-06-01"
144
+ },
145
+ body: JSON.stringify(body)
146
+ });
147
+ if (!resp.ok) throw new Error(`Anthropic request failed: ${resp.status}`);
148
+ const data = await resp.json();
149
+ return data.content?.[0]?.text || "";
150
+ }
151
+ async function samplingComplete(messages, maxTokens) {
152
+ if (!samplingServer) throw new Error("MCP sampling server not set");
153
+ const systemMsg = messages.find((m) => m.role === "system");
154
+ const userMessages = messages.filter((m) => m.role !== "system");
155
+ const result = await samplingServer.createMessage({
156
+ messages: userMessages.map((m) => ({
157
+ role: m.role,
158
+ content: { type: "text", text: m.content }
159
+ })),
160
+ systemPrompt: systemMsg?.content,
161
+ maxTokens,
162
+ modelPreferences: {
163
+ // Request cheap, fast model (Haiku-class). We don't need Opus for query expansion.
164
+ costPriority: 0.9,
165
+ speedPriority: 0.8,
166
+ intelligencePriority: 0.3,
167
+ hints: [{ name: "haiku" }]
168
+ }
169
+ });
170
+ if (result?.content?.type === "text") return result.content.text;
171
+ if (typeof result?.content === "string") return result.content;
172
+ return "";
173
+ }
174
+ async function expandQuery(query, intent) {
175
+ const cacheKey = intent ? `expand:${query}||${intent}` : `expand:${query}`;
176
+ const dbCached = dbCacheGet(cacheKey);
177
+ if (dbCached) {
178
+ try {
179
+ return JSON.parse(dbCached);
180
+ } catch {
181
+ }
182
+ }
183
+ const cached = expansionCache.get(cacheKey);
184
+ if (cached) return cached;
185
+ const config = await detectProvider();
186
+ if (config.provider === "none") return [];
187
+ try {
188
+ const intentContext = intent ? `
189
+ Query intent: ${intent}. Use this to guide your variations toward the intended domain.` : "";
190
+ const result = await chatComplete(config, [
191
+ { role: "system", content: EXPAND_PROMPT + intentContext },
192
+ { role: "user", content: query }
193
+ ], 300);
194
+ const lines = result.trim().split("\n");
195
+ const queryLower = query.toLowerCase();
196
+ const queryTerms = queryLower.replace(/[^a-z0-9\s]/g, " ").split(/\s+/).filter(Boolean);
197
+ const hasQueryTerm = (text) => {
198
+ const lower = text.toLowerCase();
199
+ if (queryTerms.length === 0) return true;
200
+ return queryTerms.some((term) => lower.includes(term));
201
+ };
202
+ const variations = lines.map((line) => {
203
+ const colonIdx = line.indexOf(":");
204
+ if (colonIdx === -1) return null;
205
+ const type = line.slice(0, colonIdx).trim();
206
+ if (type !== "lex" && type !== "vec" && type !== "hyde") return null;
207
+ const text = line.slice(colonIdx + 1).trim();
208
+ if (!text || !hasQueryTerm(text)) return null;
209
+ return { type, text };
210
+ }).filter((v) => v !== null);
211
+ if (variations.length > 0) {
212
+ expansionCache.set(cacheKey, variations);
213
+ dbCacheSet(cacheKey, "expansion", query, intent, JSON.stringify(variations), config.provider);
214
+ return variations;
215
+ }
216
+ } catch (err) {
217
+ process.stderr.write(`[memory-crystal] Query expansion failed: ${err.message}
218
+ `);
219
+ }
220
+ const fallback = [
221
+ { type: "lex", text: query },
222
+ { type: "vec", text: query },
223
+ { type: "hyde", text: `Information about ${query}` }
224
+ ];
225
+ return fallback;
226
+ }
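expandQuery returns typed variations parsed from the lex:/vec:/hyde: lines, keeping only lines that share at least one term with the original query. A hypothetical result shape for the query "sqlite-vec migration":

[
  { type: "lex",  text: "sqlite-vec migration steps sqlite database" },
  { type: "vec",  text: "moving existing chunks and embeddings into sqlite-vec" },
  { type: "hyde", text: "The migration copies each chunk and its embedding into the sqlite-vec table." }
]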
227
+ async function rerankResults(query, passages) {
228
+ const config = await detectProvider();
229
+ if (config.provider === "none") {
230
+ return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
231
+ }
232
+ const { createHash: createHash2 } = await import("crypto");
233
+ const contentHash = createHash2("sha256").update(passages.map((p) => p.slice(0, 200)).sort().join("|")).digest("hex").slice(0, 16);
234
+ const rerankCacheKey = `rerank:${query}||${contentHash}`;
235
+ const dbCachedRerank = dbCacheGet(rerankCacheKey);
236
+ if (dbCachedRerank) {
237
+ try {
238
+ return JSON.parse(dbCachedRerank);
239
+ } catch {
240
+ }
241
+ }
242
+ try {
243
+ const passageList = passages.map((p, i) => `[${i}] ${p.slice(0, 500)}`).join("\n\n");
244
+ const result = await chatComplete(config, [
245
+ { role: "system", content: RERANK_PROMPT },
246
+ { role: "user", content: `Query: ${query}
247
+
248
+ Passages:
249
+ ${passageList}` }
250
+ ], 200);
251
+ const results = [];
252
+ for (const line of result.trim().split("\n")) {
253
+ const match = line.match(/^(\d+):\s*([\d.]+)/);
254
+ if (match) {
255
+ results.push({ index: parseInt(match[1]), score: parseFloat(match[2]) });
256
+ }
257
+ }
258
+ const scored = new Set(results.map((r) => r.index));
259
+ for (let i = 0; i < passages.length; i++) {
260
+ if (!scored.has(i)) results.push({ index: i, score: 0 });
261
+ }
262
+ const sorted = results.sort((a, b) => b.score - a.score);
263
+ dbCacheSet(rerankCacheKey, "rerank", query, void 0, JSON.stringify(sorted), config.provider);
264
+ return sorted;
265
+ } catch (err) {
266
+ process.stderr.write(`[memory-crystal] Reranking failed: ${err.message}
267
+ `);
268
+ return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
269
+ }
270
+ }
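rerankResults parses "index: score" lines from the model, appends any passage the model skipped with a score of 0, and sorts descending. A hypothetical parse over three passages where the model only scored two:

// Model output "0: 0.9\n2: 0.4" over passages [p0, p1, p2] becomes:
[
  { index: 0, score: 0.9 },
  { index: 2, score: 0.4 },
  { index: 1, score: 0 }   // unscored passages are appended with score 0
]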
271
+ var samplingServer, expansionCache, _cacheDb, CACHE_TTL_DAYS, detectedProvider, detectionDone, EXPAND_PROMPT, RERANK_PROMPT;
272
+ var init_llm = __esm({
273
+ "src/llm.ts"() {
274
+ "use strict";
275
+ samplingServer = null;
276
+ expansionCache = /* @__PURE__ */ new Map();
277
+ _cacheDb = null;
278
+ CACHE_TTL_DAYS = parseInt(process.env.CRYSTAL_CACHE_TTL_DAYS || "7", 10);
279
+ detectedProvider = null;
280
+ detectionDone = false;
281
+ EXPAND_PROMPT = `You are a search query expander. Given a search query, generate exactly 3 variations to improve search recall.
282
+
283
+ Output exactly 3 lines in this format (no other text):
284
+ lex: <keyword-focused variation for full-text search>
285
+ vec: <semantic variation rephrased for embedding similarity>
286
+ hyde: <hypothetical document snippet that would answer this query>
287
+
288
+ Rules:
289
+ - Each variation must contain at least one term from the original query
290
+ - Keep variations concise (under 30 words each)
291
+ - lex should use specific keywords and synonyms
292
+ - vec should rephrase the intent naturally
293
+ - hyde should be a short passage as if answering the query`;
294
+ RERANK_PROMPT = `You are a search result re-ranker. Given a query and a list of text passages, rate each passage's relevance to the query.
295
+
296
+ Output one line per passage in this exact format:
297
+ <index>: <score>
298
+
299
+ Where index is the passage number (0-based) and score is a float from 0.0 to 1.0.
300
+ - 1.0 = perfectly relevant, directly answers the query
301
+ - 0.7 = highly relevant, closely related
302
+ - 0.4 = somewhat relevant, tangentially related
303
+ - 0.1 = barely relevant
304
+ - 0.0 = not relevant at all
305
+
306
+ Rate ALL passages. Output nothing else.`;
307
+ }
308
+ });
309
+
310
+ // src/search-pipeline.ts
311
+ var search_pipeline_exports = {};
312
+ __export(search_pipeline_exports, {
313
+ deepSearch: () => deepSearch
314
+ });
315
+ async function deepSearch(crystal, query, options = {}) {
316
+ const limit = options.limit || 5;
317
+ const candidateLimit = options.candidateLimit || DEFAULT_CANDIDATE_LIMIT;
318
+ const intent = options.intent;
319
+ const filter = options.filter;
320
+ const explain = options.explain || false;
321
+ const provider = await detectProvider();
322
+ if (provider.provider === "none") {
323
+ return crystal.search(query, limit, filter);
324
+ }
325
+ const db = crystal.sqliteDb;
326
+ if (!db) return crystal.search(query, limit, filter);
327
+ const sinceDate = filter?.since ? crystal.parseSince(filter.since) : void 0;
328
+ const untilDate = filter?.until ? crystal.parseSince(filter.until) : void 0;
329
+ const internalFilter = { ...filter, sinceDate, untilDate };
330
+ const initialFts = crystal.searchFTS(query, 20, internalFilter);
331
+ const topScore = initialFts[0]?.score ?? 0;
332
+ const secondScore = initialFts[1]?.score ?? 0;
333
+ const hasStrongSignal = !intent && initialFts.length > 0 && topScore >= STRONG_SIGNAL_MIN_SCORE && topScore - secondScore >= STRONG_SIGNAL_MIN_GAP;
334
+ const expanded = hasStrongSignal ? [] : await expandQuery(query, intent);
335
+ const allResultLists = [];
336
+ if (initialFts.length > 0) allResultLists.push(initialFts);
337
+ const [queryEmbedding] = await crystal.embed([query]);
338
+ const originalVec = crystal.searchVec(queryEmbedding, 30, internalFilter);
339
+ if (originalVec.length > 0) allResultLists.push(originalVec);
340
+ for (const variation of expanded) {
341
+ if (variation.type === "lex") {
342
+ const ftsResults = crystal.searchFTS(variation.text, 20, internalFilter);
343
+ if (ftsResults.length > 0) allResultLists.push(ftsResults);
344
+ } else {
345
+ const [embedding] = await crystal.embed([variation.text]);
346
+ const vecResults = crystal.searchVec(embedding, 20, internalFilter);
347
+ if (vecResults.length > 0) allResultLists.push(vecResults);
348
+ }
349
+ }
350
+ const weights = allResultLists.map((_, i) => i < 2 ? 2 : 1);
351
+ const fused = crystal.reciprocalRankFusion(allResultLists, weights);
352
+ const candidates = fused.slice(0, candidateLimit);
353
+ if (candidates.length === 0) return [];
354
+ const ftsScoreMap = /* @__PURE__ */ new Map();
355
+ const vecScoreMap = /* @__PURE__ */ new Map();
356
+ if (explain) {
357
+ for (const r of initialFts) ftsScoreMap.set(r.text.slice(0, 200), r.score);
358
+ for (const r of originalVec) vecScoreMap.set(r.text.slice(0, 200), r.score);
359
+ }
360
+ const passages = candidates.map((c) => c.text.slice(0, 500));
361
+ const rerankQuery = intent ? `${intent}: ${query}` : query;
362
+ const reranked = await rerankResults(rerankQuery, passages);
363
+ const now = Date.now();
364
+ const blended = reranked.map((r) => {
365
+ const candidate = candidates[r.index];
366
+ if (!candidate) return null;
367
+ const rrfRank = r.index + 1;
368
+ let rrfWeight;
369
+ if (rrfRank <= 3) rrfWeight = 0.75;
370
+ else if (rrfRank <= 10) rrfWeight = 0.6;
371
+ else rrfWeight = 0.4;
372
+ const rrfScore = 1 / rrfRank;
373
+ const blendedScore = rrfWeight * rrfScore + (1 - rrfWeight) * r.score;
374
+ const ageDays = candidate.created_at ? (now - new Date(candidate.created_at).getTime()) / 864e5 : 0;
375
+ const recency = candidate.created_at ? crystal.recencyWeight(ageDays) : 1;
376
+ const finalScore = blendedScore * recency;
377
+ const freshness = candidate.created_at ? crystal.freshnessLabel(ageDays) : void 0;
378
+ const result = {
379
+ ...candidate,
380
+ score: finalScore,
381
+ freshness
382
+ };
383
+ if (explain) {
384
+ const dedup = candidate.text.slice(0, 200);
385
+ result.explain = {
386
+ fts_score: ftsScoreMap.get(dedup),
387
+ vec_score: vecScoreMap.get(dedup),
388
+ rrf_rank: rrfRank,
389
+ rrf_score: rrfScore,
390
+ rerank_score: r.score,
391
+ recency_weight: recency,
392
+ final_score: finalScore
393
+ };
394
+ }
395
+ return result;
396
+ }).filter((r) => r !== null);
397
+ const sorted = blended.sort((a, b) => b.score - a.score).slice(0, limit);
398
+ const topNormScore = sorted[0]?.score || 1;
399
+ return sorted.map((r) => ({ ...r, score: Math.min(r.score / topNormScore * 0.95, 0.95) }));
400
+ }
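The blend above weights RRF position more heavily for top candidates (0.75 for the top 3, 0.6 up to rank 10, 0.4 below) before applying recency decay. Illustrative arithmetic for a candidate ranked 2nd in the fused list, reranked at 0.9, from a 5-day-old chunk:

const rrfWeight = 0.75;                                        // rank 2 falls in the top-3 band
const blended = rrfWeight * (1 / 2) + (1 - rrfWeight) * 0.9;   // 0.375 + 0.225 = 0.6
const final = blended * Math.max(0.3, Math.exp(-5 * 0.1));     // ≈ 0.6 * 0.607 ≈ 0.364
// Scores are then rescaled so the best result caps out at 0.95.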
401
+ var STRONG_SIGNAL_MIN_SCORE, STRONG_SIGNAL_MIN_GAP, DEFAULT_CANDIDATE_LIMIT;
402
+ var init_search_pipeline = __esm({
403
+ "src/search-pipeline.ts"() {
404
+ "use strict";
405
+ init_llm();
406
+ STRONG_SIGNAL_MIN_SCORE = 0.85;
407
+ STRONG_SIGNAL_MIN_GAP = 0.15;
408
+ DEFAULT_CANDIDATE_LIMIT = 40;
409
+ }
410
+ });
411
+
412
+ // src/core.ts
413
+ import * as lancedb from "@lancedb/lancedb";
414
+ import Database from "better-sqlite3";
415
+ import * as sqliteVec from "sqlite-vec";
416
+ import { readFileSync as readFileSync2, existsSync as existsSync2, mkdirSync, readdirSync, statSync } from "fs";
417
+ import { execSync as execSync2 } from "child_process";
418
+ import { join as join2, relative, extname, basename } from "path";
419
+ import { createHash } from "crypto";
420
+ import http from "http";
421
+ import https from "https";
422
+ async function embedOpenAI(texts, apiKey, model) {
423
+ return new Promise((resolve, reject) => {
424
+ const body = JSON.stringify({ input: texts, model });
425
+ const req = https.request({
426
+ hostname: "api.openai.com",
427
+ path: "/v1/embeddings",
428
+ method: "POST",
429
+ headers: {
430
+ "Content-Type": "application/json",
431
+ "Authorization": `Bearer ${apiKey}`,
432
+ "Content-Length": Buffer.byteLength(body)
433
+ },
434
+ timeout: 3e4
435
+ }, (res) => {
436
+ let data = "";
437
+ res.on("data", (chunk) => data += chunk);
438
+ res.on("end", () => {
439
+ if (res.statusCode !== 200) {
440
+ reject(new Error(`OpenAI API error ${res.statusCode}: ${data.slice(0, 200)}`));
441
+ return;
442
+ }
443
+ const parsed = JSON.parse(data);
444
+ resolve(parsed.data.map((d) => d.embedding));
445
+ });
446
+ });
447
+ req.on("error", reject);
448
+ req.on("timeout", () => {
449
+ req.destroy();
450
+ reject(new Error("OpenAI timeout"));
451
+ });
452
+ req.write(body);
453
+ req.end();
454
+ });
455
+ }
456
+ async function embedOllama(texts, host, model) {
457
+ const results = [];
458
+ for (const text of texts) {
459
+ const result = await new Promise((resolve, reject) => {
460
+ const url = new URL("/api/embeddings", host);
461
+ const body = JSON.stringify({ model, prompt: text });
462
+ const req = http.request({
463
+ hostname: url.hostname,
464
+ port: url.port,
465
+ path: url.pathname,
466
+ method: "POST",
467
+ headers: {
468
+ "Content-Type": "application/json",
469
+ "Content-Length": Buffer.byteLength(body)
470
+ },
471
+ timeout: 15e3
472
+ }, (res) => {
473
+ let data = "";
474
+ res.on("data", (chunk) => data += chunk);
475
+ res.on("end", () => {
476
+ if (res.statusCode !== 200) {
477
+ reject(new Error(`Ollama error ${res.statusCode}: ${data.slice(0, 200)}`));
478
+ return;
479
+ }
480
+ resolve(JSON.parse(data).embedding);
481
+ });
482
+ });
483
+ req.on("error", reject);
484
+ req.on("timeout", () => {
485
+ req.destroy();
486
+ reject(new Error("Ollama timeout"));
487
+ });
488
+ req.write(body);
489
+ req.end();
490
+ });
491
+ results.push(result);
492
+ }
493
+ return results;
494
+ }
495
+ async function embedGoogle(texts, apiKey, model) {
496
+ return new Promise((resolve, reject) => {
497
+ const body = JSON.stringify({
498
+ requests: texts.map((text) => ({ model: `models/${model}`, content: { parts: [{ text }] } }))
499
+ });
500
+ const req = https.request({
501
+ hostname: "generativelanguage.googleapis.com",
502
+ path: `/v1beta/models/${model}:batchEmbedContents?key=${apiKey}`,
503
+ method: "POST",
504
+ headers: {
505
+ "Content-Type": "application/json",
506
+ "Content-Length": Buffer.byteLength(body)
507
+ },
508
+ timeout: 3e4
509
+ }, (res) => {
510
+ let data = "";
511
+ res.on("data", (chunk) => data += chunk);
512
+ res.on("end", () => {
513
+ if (res.statusCode !== 200) {
514
+ reject(new Error(`Google API error ${res.statusCode}: ${data.slice(0, 200)}`));
515
+ return;
516
+ }
517
+ const parsed = JSON.parse(data);
518
+ resolve(parsed.embeddings.map((e) => e.values));
519
+ });
520
+ });
521
+ req.on("error", reject);
522
+ req.on("timeout", () => {
523
+ req.destroy();
524
+ reject(new Error("Google timeout"));
525
+ });
526
+ req.write(body);
527
+ req.end();
528
+ });
529
+ }
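The three HTTP helpers above back the Crystal.embed() switch below; which one runs is chosen by the embeddingProvider field of the config. A plausible config shape, with placeholder values and only the fields visible in this diff:

const config = {
  dataDir: "/path/to/crystal-data",       // created on demand by the constructor
  embeddingProvider: "ollama",            // "openai" | "ollama" | "google"
  ollamaHost: "http://localhost:11434",   // defaults used by embed() when omitted
  ollamaModel: "nomic-embed-text"
};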
530
+ var Crystal = class _Crystal {
531
+ config;
532
+ lanceDb = null;
533
+ sqliteDb = null;
534
+ chunksTable = null;
535
+ vecDimensions = null;
536
+ constructor(config) {
537
+ this.config = config;
538
+ if (!existsSync2(config.dataDir)) {
539
+ mkdirSync(config.dataDir, { recursive: true });
540
+ }
541
+ }
542
+ // ── Initialization ──
543
+ async init() {
544
+ const lanceDir = join2(this.config.dataDir, "lance");
545
+ const sqlitePath = join2(this.config.dataDir, "crystal.db");
546
+ if (!existsSync2(lanceDir)) mkdirSync(lanceDir, { recursive: true });
547
+ this.lanceDb = await lancedb.connect(lanceDir);
548
+ this.sqliteDb = new Database(sqlitePath);
549
+ this.sqliteDb.pragma("journal_mode = WAL");
550
+ sqliteVec.load(this.sqliteDb);
551
+ this.initSqliteTables();
552
+ this.initChunksTables();
553
+ await this.initLanceTables();
554
+ }
555
+ initSqliteTables() {
556
+ const db = this.sqliteDb;
557
+ db.exec(`
558
+ CREATE TABLE IF NOT EXISTS sources (
559
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
560
+ type TEXT NOT NULL,
561
+ uri TEXT NOT NULL,
562
+ title TEXT,
563
+ agent_id TEXT NOT NULL,
564
+ metadata TEXT DEFAULT '{}',
565
+ ingested_at TEXT NOT NULL,
566
+ chunk_count INTEGER DEFAULT 0
567
+ );
568
+
569
+ CREATE TABLE IF NOT EXISTS capture_state (
570
+ agent_id TEXT NOT NULL,
571
+ source_id TEXT NOT NULL,
572
+ last_message_count INTEGER DEFAULT 0,
573
+ capture_count INTEGER DEFAULT 0,
574
+ last_capture_at TEXT,
575
+ PRIMARY KEY (agent_id, source_id)
576
+ );
577
+
578
+ CREATE TABLE IF NOT EXISTS memories (
579
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
580
+ text TEXT NOT NULL,
581
+ category TEXT NOT NULL DEFAULT 'fact',
582
+ confidence REAL NOT NULL DEFAULT 1.0,
583
+ source_ids TEXT DEFAULT '[]',
584
+ status TEXT NOT NULL DEFAULT 'active',
585
+ created_at TEXT NOT NULL,
586
+ updated_at TEXT NOT NULL
587
+ );
588
+
589
+ CREATE TABLE IF NOT EXISTS entities (
590
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
591
+ name TEXT NOT NULL UNIQUE,
592
+ type TEXT NOT NULL DEFAULT 'concept',
593
+ description TEXT,
594
+ properties TEXT DEFAULT '{}',
595
+ created_at TEXT NOT NULL,
596
+ updated_at TEXT NOT NULL
597
+ );
598
+
599
+ CREATE TABLE IF NOT EXISTS relationships (
600
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
601
+ source_id INTEGER NOT NULL REFERENCES entities(id),
602
+ target_id INTEGER NOT NULL REFERENCES entities(id),
603
+ type TEXT NOT NULL,
604
+ description TEXT,
605
+ weight REAL DEFAULT 1.0,
606
+ valid_from TEXT NOT NULL,
607
+ valid_until TEXT,
608
+ created_at TEXT NOT NULL
609
+ );
610
+
611
+ CREATE INDEX IF NOT EXISTS idx_sources_agent ON sources(agent_id);
612
+ CREATE INDEX IF NOT EXISTS idx_memories_status ON memories(status);
613
+ CREATE INDEX IF NOT EXISTS idx_entities_name ON entities(name);
614
+ CREATE INDEX IF NOT EXISTS idx_relationships_source ON relationships(source_id);
615
+ CREATE INDEX IF NOT EXISTS idx_relationships_target ON relationships(target_id);
616
+
617
+ -- LLM cache (persistent expansion + reranking results)
618
+ CREATE TABLE IF NOT EXISTS llm_cache (
619
+ cache_key TEXT PRIMARY KEY,
620
+ cache_type TEXT NOT NULL,
621
+ query TEXT NOT NULL,
622
+ intent TEXT,
623
+ result TEXT NOT NULL,
624
+ provider TEXT NOT NULL,
625
+ created_at TEXT NOT NULL,
626
+ hit_count INTEGER DEFAULT 0,
627
+ last_hit_at TEXT
628
+ );
629
+ CREATE INDEX IF NOT EXISTS idx_llm_cache_type ON llm_cache(cache_type);
630
+ CREATE INDEX IF NOT EXISTS idx_llm_cache_created ON llm_cache(created_at);
631
+
632
+ -- Source file indexing (optional feature)
633
+ CREATE TABLE IF NOT EXISTS source_collections (
634
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
635
+ name TEXT NOT NULL UNIQUE,
636
+ root_path TEXT NOT NULL,
637
+ glob_patterns TEXT NOT NULL DEFAULT '["**/*"]',
638
+ ignore_patterns TEXT NOT NULL DEFAULT '[]',
639
+ file_count INTEGER DEFAULT 0,
640
+ chunk_count INTEGER DEFAULT 0,
641
+ last_sync_at TEXT,
642
+ created_at TEXT NOT NULL
643
+ );
644
+
645
+ CREATE TABLE IF NOT EXISTS source_files (
646
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
647
+ collection_id INTEGER NOT NULL REFERENCES source_collections(id) ON DELETE CASCADE,
648
+ file_path TEXT NOT NULL,
649
+ file_hash TEXT NOT NULL,
650
+ file_size INTEGER NOT NULL,
651
+ chunk_count INTEGER DEFAULT 0,
652
+ last_indexed_at TEXT NOT NULL
653
+ );
654
+
655
+ CREATE UNIQUE INDEX IF NOT EXISTS idx_source_files_path ON source_files(collection_id, file_path);
656
+ CREATE INDEX IF NOT EXISTS idx_source_files_collection ON source_files(collection_id);
657
+ `);
658
+ }
659
+ initChunksTables() {
660
+ const db = this.sqliteDb;
661
+ db.exec(`
662
+ CREATE TABLE IF NOT EXISTS chunks (
663
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
664
+ text TEXT NOT NULL,
665
+ text_hash TEXT NOT NULL,
666
+ role TEXT,
667
+ source_type TEXT,
668
+ source_id TEXT,
669
+ agent_id TEXT,
670
+ token_count INTEGER,
671
+ created_at TEXT NOT NULL
672
+ );
673
+
674
+ CREATE INDEX IF NOT EXISTS idx_chunks_agent ON chunks(agent_id);
675
+ CREATE INDEX IF NOT EXISTS idx_chunks_source ON chunks(source_type);
676
+ CREATE INDEX IF NOT EXISTS idx_chunks_hash ON chunks(text_hash);
677
+ CREATE INDEX IF NOT EXISTS idx_chunks_created ON chunks(created_at);
678
+
679
+ -- FTS5 full-text search table
680
+ CREATE VIRTUAL TABLE IF NOT EXISTS chunks_fts USING fts5(
681
+ text,
682
+ tokenize='porter unicode61'
683
+ );
684
+
685
+ -- Sync trigger: populate FTS on chunk insert
686
+ CREATE TRIGGER IF NOT EXISTS chunks_fts_insert AFTER INSERT ON chunks
687
+ BEGIN
688
+ INSERT INTO chunks_fts(rowid, text) VALUES (NEW.id, NEW.text);
689
+ END;
690
+
691
+ -- Sync trigger: clean up FTS and vec on chunk delete
692
+ CREATE TRIGGER IF NOT EXISTS chunks_cleanup AFTER DELETE ON chunks
693
+ BEGIN
694
+ DELETE FROM chunks_vec WHERE chunk_id = OLD.id;
695
+ INSERT INTO chunks_fts(chunks_fts, rowid, text) VALUES('delete', OLD.id, OLD.text);
696
+ END;
697
+ `);
698
+ const vecTable = db.prepare(
699
+ `SELECT name FROM sqlite_master WHERE type='table' AND name='chunks_vec'`
700
+ ).get();
701
+ if (vecTable) {
702
+ try {
703
+ const row = db.prepare("SELECT embedding FROM chunks_vec LIMIT 1").get();
704
+ if (row?.embedding) {
705
+ this.vecDimensions = row.embedding.length / 4;
706
+ }
707
+ } catch {
708
+ }
709
+ }
710
+ }
711
+ ensureVecTable(dimensions) {
712
+ const db = this.sqliteDb;
713
+ const existing = db.prepare(
714
+ `SELECT name FROM sqlite_master WHERE type='table' AND name='chunks_vec'`
715
+ ).get();
716
+ if (!existing) {
717
+ db.exec(`
718
+ CREATE VIRTUAL TABLE chunks_vec USING vec0(
719
+ chunk_id INTEGER PRIMARY KEY,
720
+ embedding float[${dimensions}] distance_metric=cosine
721
+ );
722
+ `);
723
+ }
724
+ this.vecDimensions = dimensions;
725
+ }
726
+ async initLanceTables() {
727
+ const db = this.lanceDb;
728
+ const tableNames = await db.tableNames();
729
+ if (tableNames.includes("chunks")) {
730
+ this.chunksTable = await db.openTable("chunks");
731
+ }
732
+ }
733
+ // ── Embedding ──
734
+ async embed(texts) {
735
+ if (texts.length === 0) return [];
736
+ const cfg = this.config;
737
+ switch (cfg.embeddingProvider) {
738
+ case "openai": {
739
+ if (!cfg.openaiApiKey) throw new Error("OpenAI API key required");
740
+ const model = cfg.openaiModel || "text-embedding-3-small";
741
+ const maxCharsPerBatch = 8e5;
742
+ const results = [];
743
+ let batch = [];
744
+ let batchChars = 0;
745
+ for (const text of texts) {
746
+ if (batchChars + text.length > maxCharsPerBatch && batch.length > 0) {
747
+ results.push(...await embedOpenAI(batch, cfg.openaiApiKey, model));
748
+ batch = [];
749
+ batchChars = 0;
750
+ }
751
+ batch.push(text);
752
+ batchChars += text.length;
753
+ }
754
+ if (batch.length > 0) {
755
+ results.push(...await embedOpenAI(batch, cfg.openaiApiKey, model));
756
+ }
757
+ return results;
758
+ }
759
+ case "ollama":
760
+ return embedOllama(texts, cfg.ollamaHost || "http://localhost:11434", cfg.ollamaModel || "nomic-embed-text");
761
+ case "google":
762
+ if (!cfg.googleApiKey) throw new Error("Google API key required");
763
+ return embedGoogle(texts, cfg.googleApiKey, cfg.googleModel || "text-embedding-004");
764
+ default:
765
+ throw new Error(`Unknown embedding provider: ${cfg.embeddingProvider}`);
766
+ }
767
+ }
768
+ // ── Chunking ──
769
+ chunkText(text, targetTokens = 400, overlapTokens = 80) {
770
+ const targetChars = targetTokens * 4;
771
+ const overlapChars = overlapTokens * 4;
772
+ const chunks = [];
773
+ let start = 0;
774
+ while (start < text.length) {
775
+ let end = Math.min(start + targetChars, text.length);
776
+ if (end < text.length) {
777
+ const minBreak = start + Math.floor(targetChars * 0.5);
778
+ const paraBreak = text.lastIndexOf("\n\n", end);
779
+ if (paraBreak > minBreak) {
780
+ end = paraBreak;
781
+ } else {
782
+ const sentBreak = text.lastIndexOf(". ", end);
783
+ if (sentBreak > minBreak) {
784
+ end = sentBreak + 1;
785
+ }
786
+ }
787
+ }
788
+ const chunk = text.slice(start, end).trim();
789
+ if (chunk.length > 0) chunks.push(chunk);
790
+ if (end >= text.length) break;
791
+ start = end - overlapChars;
792
+ if (start <= (chunks.length > 0 ? end - targetChars : 0)) {
793
+ start = end;
794
+ }
795
+ }
796
+ return chunks;
797
+ }
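chunkText approximates a token as 4 characters, so the defaults target roughly 1600-character chunks with 320 characters of overlap, preferring a paragraph break past the halfway point and falling back to a sentence break. A usage sketch, assuming an initialized Crystal instance named crystal:

const chunks = crystal.chunkText(longText);           // ~1600-char chunks, ~320-char overlap
const small = crystal.chunkText(longText, 200, 40);   // ~800-char chunks, ~160-char overlap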
798
+ // ── Ingest ──
799
+ async ingest(chunks) {
800
+ if (chunks.length === 0) return 0;
801
+ const db = this.sqliteDb;
802
+ const newChunks = chunks.filter((c) => {
803
+ const hash = createHash("sha256").update(c.text).digest("hex");
804
+ return !db.prepare("SELECT 1 FROM chunks WHERE text_hash = ?").get(hash);
805
+ });
806
+ if (newChunks.length === 0) return 0;
807
+ const texts = newChunks.map((c) => c.text);
808
+ const embeddings = await this.embed(texts);
809
+ if (!this.vecDimensions && embeddings.length > 0) {
810
+ this.ensureVecTable(embeddings[0].length);
811
+ }
812
+ const insertChunk = db.prepare(`
813
+ INSERT INTO chunks (text, text_hash, role, source_type, source_id, agent_id, token_count, created_at)
814
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
815
+ `);
816
+ const insertVec = db.prepare(`
817
+ INSERT INTO chunks_vec (chunk_id, embedding) VALUES (?, ?)
818
+ `);
819
+ const transaction = db.transaction(() => {
820
+ for (let i = 0; i < newChunks.length; i++) {
821
+ const c = newChunks[i];
822
+ const hash = createHash("sha256").update(c.text).digest("hex");
823
+ const result = insertChunk.run(
824
+ c.text,
825
+ hash,
826
+ c.role,
827
+ c.source_type,
828
+ c.source_id,
829
+ c.agent_id,
830
+ c.token_count,
831
+ c.created_at || (/* @__PURE__ */ new Date()).toISOString()
832
+ );
833
+ const chunkId = typeof result.lastInsertRowid === "bigint" ? result.lastInsertRowid : BigInt(result.lastInsertRowid);
834
+ insertVec.run(chunkId, new Float32Array(embeddings[i]));
835
+ }
836
+ });
837
+ transaction();
838
+ const records = newChunks.map((chunk, i) => ({
839
+ text: chunk.text,
840
+ vector: embeddings[i],
841
+ role: chunk.role,
842
+ source_type: chunk.source_type,
843
+ source_id: chunk.source_id,
844
+ agent_id: chunk.agent_id,
845
+ token_count: chunk.token_count,
846
+ created_at: chunk.created_at || (/* @__PURE__ */ new Date()).toISOString()
847
+ }));
848
+ try {
849
+ if (!this.chunksTable) {
850
+ this.chunksTable = await this.lanceDb.createTable("chunks", records);
851
+ } else {
852
+ await this.chunksTable.add(records);
853
+ }
854
+ } catch (err) {
855
+ console.warn("LanceDB dual-write failed (non-fatal):", err.message);
856
+ }
857
+ return newChunks.length;
858
+ }
859
+ // ── Delta Sync (export/import pre-embedded chunks) ──
860
+ /** Export interface for delta sync payloads. */
861
+ static DELTA_VERSION = 1;
862
+ /** Export chunks with IDs greater than sinceId. Returns pre-embedded chunks for delta sync.
863
+ * Core calls this to build delta payloads for Nodes. */
864
+ exportChunksSince(sinceId) {
865
+ const db = this.sqliteDb;
866
+ const rows = db.prepare(`
867
+ SELECT c.id, c.text, c.text_hash, c.role, c.source_type, c.source_id,
868
+ c.agent_id, c.token_count, c.created_at, v.embedding
869
+ FROM chunks c
870
+ LEFT JOIN chunks_vec v ON v.chunk_id = c.id
871
+ WHERE c.id > ?
872
+ ORDER BY c.id ASC
873
+ `).all(sinceId);
874
+ return rows.map((row) => ({
875
+ id: row.id,
876
+ text: row.text,
877
+ text_hash: row.text_hash,
878
+ role: row.role,
879
+ source_type: row.source_type,
880
+ source_id: row.source_id,
881
+ agent_id: row.agent_id,
882
+ token_count: row.token_count,
883
+ created_at: row.created_at,
884
+ // Convert Float32Array buffer to number[] for JSON serialization
885
+ embedding: row.embedding ? Array.from(new Float32Array(row.embedding.buffer, row.embedding.byteOffset, row.embedding.byteLength / 4)) : null
886
+ }));
887
+ }
888
+ /** Get the highest chunk ID in the database. Used for watermark tracking. */
889
+ getMaxChunkId() {
890
+ const db = this.sqliteDb;
891
+ const row = db.prepare("SELECT MAX(id) as maxId FROM chunks").get();
892
+ return row.maxId || 0;
893
+ }
894
+ /** Import pre-embedded chunks from Core. Node calls this to apply delta payloads.
895
+ * Skips chunks that already exist (by text_hash). Does NOT re-embed. */
896
+ importChunks(exported) {
897
+ if (exported.length === 0) return 0;
898
+ const db = this.sqliteDb;
899
+ const firstWithEmbed = exported.find((c) => c.embedding && c.embedding.length > 0);
900
+ if (firstWithEmbed && !this.vecDimensions) {
901
+ this.ensureVecTable(firstWithEmbed.embedding.length);
902
+ }
903
+ const insertChunk = db.prepare(`
904
+ INSERT INTO chunks (text, text_hash, role, source_type, source_id, agent_id, token_count, created_at)
905
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
906
+ `);
907
+ const insertVec = db.prepare(`
908
+ INSERT INTO chunks_vec (chunk_id, embedding) VALUES (?, ?)
909
+ `);
910
+ const checkHash = db.prepare("SELECT 1 FROM chunks WHERE text_hash = ?");
911
+ let imported = 0;
912
+ const transaction = db.transaction(() => {
913
+ for (const chunk of exported) {
914
+ if (checkHash.get(chunk.text_hash)) continue;
915
+ const result = insertChunk.run(
916
+ chunk.text,
917
+ chunk.text_hash,
918
+ chunk.role,
919
+ chunk.source_type,
920
+ chunk.source_id,
921
+ chunk.agent_id,
922
+ chunk.token_count,
923
+ chunk.created_at
924
+ );
925
+ if (chunk.embedding && chunk.embedding.length > 0) {
926
+ const chunkId = typeof result.lastInsertRowid === "bigint" ? result.lastInsertRowid : BigInt(result.lastInsertRowid);
927
+ insertVec.run(chunkId, new Float32Array(chunk.embedding));
928
+ }
929
+ imported++;
930
+ }
931
+ });
932
+ transaction();
933
+ return imported;
934
+ }
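Together, exportChunksSince, getMaxChunkId, and importChunks implement the watermark-based delta sync: Core exports everything past the last synced id, the Node imports it without re-embedding, and the caller advances the watermark. A rough round-trip sketch, where core, node, and lastSyncedId are hypothetical caller-side names:

const delta = core.exportChunksSince(lastSyncedId);   // pre-embedded chunks with id > watermark
const applied = node.importChunks(delta);             // dedups by text_hash, reuses embeddings
lastSyncedId = core.getMaxChunkId();                  // persist this as the new watermark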
935
+ // ── Recency helpers ──
936
+ recencyWeight(ageDays) {
937
+ return Math.max(0.3, Math.exp(-ageDays * 0.1));
938
+ }
939
+ /** Parse relative time strings ("24h", "7d", "30d") or ISO dates into ISO date strings. */
940
+ parseSince(since) {
941
+ const match = since.match(/^(\d+)(h|d)$/);
942
+ if (match) {
943
+ const [, num, unit] = match;
944
+ const ms = unit === "h" ? parseInt(num) * 36e5 : parseInt(num) * 864e5;
945
+ return new Date(Date.now() - ms).toISOString();
946
+ }
947
+ const parsed = new Date(since);
948
+ if (!isNaN(parsed.getTime())) return parsed.toISOString();
949
+ return void 0;
950
+ }
951
+ freshnessLabel(ageDays) {
952
+ if (ageDays < 3) return "fresh";
953
+ if (ageDays < 7) return "recent";
954
+ if (ageDays < 14) return "aging";
955
+ return "stale";
956
+ }
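recencyWeight decays exponentially at 0.1 per day with a 0.3 floor, so the floor takes over a little past 12 days, and the freshnessLabel bands track the same curve. A few spot values:

Math.max(0.3, Math.exp(-0 * 0.1));    // 1.00  -> freshnessLabel(0)  === "fresh"
Math.max(0.3, Math.exp(-5 * 0.1));    // ≈0.61 -> freshnessLabel(5)  === "recent"
Math.max(0.3, Math.exp(-10 * 0.1));   // ≈0.37 -> freshnessLabel(10) === "aging"
Math.max(0.3, Math.exp(-20 * 0.1));   // 0.30 (floor, reached past ~12 days) -> "stale"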
957
+ // ── Search (Hybrid: BM25 + Vector + RRF fusion + Recency) ──
958
+ async search(query, limit = 5, filter) {
959
+ const db = this.sqliteDb;
960
+ const sqliteChunks = db.prepare("SELECT COUNT(*) as count FROM chunks").get()?.count || 0;
961
+ let lanceChunks = 0;
962
+ if (this.chunksTable) {
963
+ try {
964
+ lanceChunks = await this.chunksTable.countRows();
965
+ } catch {
966
+ }
967
+ }
968
+ if (sqliteChunks === 0 || lanceChunks > 0 && sqliteChunks < lanceChunks * 0.5) {
969
+ return this.searchLanceFallback(query, limit, filter);
970
+ }
971
+ const sinceDate = filter?.since ? this.parseSince(filter.since) : void 0;
972
+ const untilDate = filter?.until ? this.parseSince(filter.until) : void 0;
973
+ const [embedding] = await this.embed([query]);
974
+ const fetchLimit = Math.max(limit * 5, 50);
975
+ const vecResults = this.searchVec(embedding, fetchLimit, { ...filter, sinceDate, untilDate });
976
+ const ftsResults = this.searchFTS(query, fetchLimit, { ...filter, sinceDate, untilDate });
977
+ const fused = this.reciprocalRankFusion([ftsResults, vecResults], [2, 1]);
978
+ const now = Date.now();
979
+ const scored = fused.map((r) => {
980
+ const ageDays = r.created_at ? (now - new Date(r.created_at).getTime()) / 864e5 : 0;
981
+ const recency = r.created_at ? this.recencyWeight(ageDays) : 1;
982
+ const rescaled = r.score * recency;
983
+ return {
984
+ ...r,
985
+ score: rescaled,
986
+ freshness: r.created_at ? this.freshnessLabel(ageDays) : void 0
987
+ };
988
+ });
989
+ const sorted = scored.sort((a, b) => b.score - a.score).slice(0, limit);
990
+ const topScore = sorted[0]?.score || 1;
991
+ return sorted.map((r) => ({ ...r, score: Math.min(r.score / topScore * 0.95, 0.95) }));
992
+ }
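A usage sketch of the hybrid search entry point, assuming an initialized Crystal instance named crystal; the filter fields mirror the ones handled by searchVec and searchFTS below:

const hits = await crystal.search("delta sync watermark", 5, {
  agent_id: "main",      // optional: restrict to one agent
  source_type: "file",   // optional: e.g. chunks indexed from source collections
  since: "7d"            // optional: relative ("24h", "7d") or an ISO date
});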
993
+ /** Deep search: query expansion + LLM re-ranking + position-aware blending.
994
+ * Falls back to standard search if no LLM provider is available.
995
+ * Supports intent disambiguation, candidateLimit tuning, and explain traces. */
996
+ async deepSearch(query, limit = 5, filter, options) {
997
+ const { deepSearch: deepSearchFn } = await Promise.resolve().then(() => (init_search_pipeline(), search_pipeline_exports));
998
+ return deepSearchFn(this, query, { limit, filter, ...options });
999
+ }
1000
+ /** Structured search: pass pre-expanded queries to skip LLM expansion.
1001
+ * Each query is typed (lex, vec, hyde) and searched independently, then fused with RRF. */
1002
+ async structuredSearch(queries, limit = 5, filter) {
1003
+ const db = this.sqliteDb;
1004
+ const sinceDate = filter?.since ? this.parseSince(filter.since) : void 0;
1005
+ const untilDate = filter?.until ? this.parseSince(filter.until) : void 0;
1006
+ const internalFilter = { ...filter, sinceDate, untilDate };
1007
+ const allResultLists = [];
1008
+ for (const q of queries) {
1009
+ if (q.type === "lex") {
1010
+ const fts = this.searchFTS(q.text, Math.max(limit * 5, 50), internalFilter);
1011
+ if (fts.length > 0) allResultLists.push(fts);
1012
+ } else {
1013
+ const [embedding] = await this.embed([q.text]);
1014
+ const vec = this.searchVec(embedding, Math.max(limit * 5, 50), internalFilter);
1015
+ if (vec.length > 0) allResultLists.push(vec);
1016
+ }
1017
+ }
1018
+ const weights = allResultLists.map((_, i) => i === 0 ? 2 : 1);
1019
+ const fused = this.reciprocalRankFusion(allResultLists, weights);
1020
+ const now = Date.now();
1021
+ const scored = fused.map((r) => {
1022
+ const ageDays = r.created_at ? (now - new Date(r.created_at).getTime()) / 864e5 : 0;
1023
+ const recency = r.created_at ? this.recencyWeight(ageDays) : 1;
1024
+ return { ...r, score: r.score * recency, freshness: r.created_at ? this.freshnessLabel(ageDays) : void 0 };
1025
+ });
1026
+ const sorted = scored.sort((a, b) => b.score - a.score).slice(0, limit);
1027
+ const topScore = sorted[0]?.score || 1;
1028
+ return sorted.map((r) => ({ ...r, score: Math.min(r.score / topScore * 0.95, 0.95) }));
1029
+ }
1030
+ /** Vector search via sqlite-vec. Two-step pattern: MATCH first, then JOIN. */
1031
+ searchVec(embedding, limit, filter) {
1032
+ const db = this.sqliteDb;
1033
+ if (!this.vecDimensions) return [];
1034
+ const vecRows = db.prepare(`
1035
+ SELECT chunk_id, distance
1036
+ FROM chunks_vec
1037
+ WHERE embedding MATCH ? AND k = ?
1038
+ `).all(new Float32Array(embedding), limit);
1039
+ if (vecRows.length === 0) return [];
1040
+ const ids = vecRows.map((r) => r.chunk_id);
1041
+ const distMap = new Map(vecRows.map((r) => [r.chunk_id, r.distance]));
1042
+ const placeholders = ids.map(() => "?").join(",");
1043
+ let sql = `SELECT id, text, role, source_type, source_id, agent_id, created_at FROM chunks WHERE id IN (${placeholders})`;
1044
+ const params = [...ids];
1045
+ if (filter?.agent_id) {
1046
+ sql += " AND agent_id = ?";
1047
+ params.push(filter.agent_id);
1048
+ }
1049
+ if (filter?.source_type) {
1050
+ sql += " AND source_type = ?";
1051
+ params.push(filter.source_type);
1052
+ }
1053
+ if (filter?.sinceDate) {
1054
+ sql += " AND created_at >= ?";
1055
+ params.push(filter.sinceDate);
1056
+ }
1057
+ if (filter?.untilDate) {
1058
+ sql += " AND created_at < ?";
1059
+ params.push(filter.untilDate);
1060
+ }
1061
+ const rows = db.prepare(sql).all(...params);
1062
+ return rows.map((row) => ({
1063
+ text: row.text,
1064
+ role: row.role,
1065
+ score: 1 - (distMap.get(row.id) || 1),
1066
+ // cosine similarity from distance
1067
+ source_type: row.source_type,
1068
+ source_id: row.source_id,
1069
+ agent_id: row.agent_id,
1070
+ created_at: row.created_at
1071
+ }));
1072
+ }
1073
+ /** Full-text search via FTS5 with BM25 scoring. */
1074
+ searchFTS(query, limit, filter) {
1075
+ const db = this.sqliteDb;
1076
+ const ftsQuery = this.buildFTS5Query(query);
1077
+ if (!ftsQuery) return [];
1078
+ let sql = `
1079
+ SELECT c.id, c.text, c.role, c.source_type, c.source_id, c.agent_id, c.created_at,
1080
+ bm25(chunks_fts) as bm25_score
1081
+ FROM chunks_fts f
1082
+ JOIN chunks c ON c.id = f.rowid
1083
+ WHERE chunks_fts MATCH ?
1084
+ `;
1085
+ const params = [ftsQuery];
1086
+ if (filter?.agent_id) {
1087
+ sql += " AND c.agent_id = ?";
1088
+ params.push(filter.agent_id);
1089
+ }
1090
+ if (filter?.source_type) {
1091
+ sql += " AND c.source_type = ?";
1092
+ params.push(filter.source_type);
1093
+ }
1094
+ if (filter?.sinceDate) {
1095
+ sql += " AND c.created_at >= ?";
1096
+ params.push(filter.sinceDate);
1097
+ }
1098
+ if (filter?.untilDate) {
1099
+ sql += " AND c.created_at < ?";
1100
+ params.push(filter.untilDate);
1101
+ }
1102
+ sql += " ORDER BY bm25_score LIMIT ?";
1103
+ params.push(limit);
1104
+ const rows = db.prepare(sql).all(...params);
1105
+ return rows.map((row) => ({
1106
+ text: row.text,
1107
+ role: row.role,
1108
+ // BM25 scores are negative (lower = better). Normalize to [0..1).
1109
+ // |x| / (1 + |x|) maps: strong(-10)->0.91, medium(-2)->0.67, weak(-0.5)->0.33
1110
+ score: Math.abs(row.bm25_score) / (1 + Math.abs(row.bm25_score)),
1111
+ source_type: row.source_type,
1112
+ source_id: row.source_id,
1113
+ agent_id: row.agent_id,
1114
+ created_at: row.created_at
1115
+ }));
1116
+ }
1117
+ /** Build a safe FTS5 query from user input. */
1118
+ buildFTS5Query(query) {
1119
+ const terms = query.split(/\s+/).map((t) => t.replace(/[^\p{L}\p{N}']/gu, "").toLowerCase()).filter((t) => t.length > 0);
1120
+ if (terms.length === 0) return null;
1121
+ if (terms.length === 1) return `"${terms[0]}"*`;
1122
+ return terms.map((t) => `"${t}"*`).join(" AND ");
1123
+ }
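buildFTS5Query lowercases each whitespace-separated term, strips everything except letters, digits, and apostrophes, and joins the survivors as AND-ed prefix matches. Two illustrative mappings, assuming an instance named crystal:

crystal.buildFTS5Query("crystal");               // => '"crystal"*'
crystal.buildFTS5Query("delta-sync status?");    // => '"deltasync"* AND "status"*'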
1124
+ /**
1125
+ * Reciprocal Rank Fusion. Ported from QMD (MIT License, Tobi Lutke, 2024-2026).
1126
+ * Fuses multiple ranked result lists into one using RRF scoring.
1127
+ * Uses text content as dedup key (instead of QMD's file path).
1128
+ */
1129
+ reciprocalRankFusion(resultLists, weights = [], k = 60) {
1130
+ const scores = /* @__PURE__ */ new Map();
1131
+ for (let listIdx = 0; listIdx < resultLists.length; listIdx++) {
1132
+ const list = resultLists[listIdx];
1133
+ if (!list) continue;
1134
+ const weight = weights[listIdx] ?? 1;
1135
+ for (let rank = 0; rank < list.length; rank++) {
1136
+ const result = list[rank];
1137
+ if (!result) continue;
1138
+ const rrfContribution = weight / (k + rank + 1);
1139
+ const dedup = result.text.slice(0, 200);
1140
+ const existing = scores.get(dedup);
1141
+ if (existing) {
1142
+ existing.rrfScore += rrfContribution;
1143
+ existing.topRank = Math.min(existing.topRank, rank);
1144
+ } else {
1145
+ scores.set(dedup, {
1146
+ result,
1147
+ rrfScore: rrfContribution,
1148
+ topRank: rank
1149
+ });
1150
+ }
1151
+ }
1152
+ }
1153
+ for (const entry of scores.values()) {
1154
+ if (entry.topRank === 0) {
1155
+ entry.rrfScore += 0.05;
1156
+ } else if (entry.topRank <= 2) {
1157
+ entry.rrfScore += 0.02;
1158
+ }
1159
+ }
1160
+ return Array.from(scores.values()).sort((a, b) => b.rrfScore - a.rrfScore).map((e) => ({ ...e.result, score: e.rrfScore }));
1161
+ }
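With k = 60, each list contributes weight / (60 + rank + 1) per appearance, and the top-rank bonus is added afterwards. Worked numbers for a chunk that is first in the FTS list (weight 2) and third in the vector list (weight 1):

const contribution = 2 / 61 + 1 / 63;   // ≈ 0.0328 + 0.0159 = 0.0487
const withBonus = contribution + 0.05;  // topRank === 0 earns the +0.05 bonus -> ≈ 0.0987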
1162
+ /** LanceDB fallback for search (used when sqlite-vec tables are empty, pre-migration). */
1163
+ async searchLanceFallback(query, limit, filter) {
1164
+ if (!this.chunksTable) return [];
1165
+ const [embedding] = await this.embed([query]);
1166
+ const fetchLimit = Math.max(limit * 3, 30);
1167
+ let queryBuilder = this.chunksTable.vectorSearch(embedding).distanceType("cosine").limit(fetchLimit);
1168
+ if (filter?.agent_id) {
1169
+ queryBuilder = queryBuilder.where(`agent_id = '${filter.agent_id}'`);
1170
+ }
1171
+ if (filter?.source_type) {
1172
+ queryBuilder = queryBuilder.where(`source_type = '${filter.source_type}'`);
1173
+ }
1174
+ const results = await queryBuilder.toArray();
1175
+ const now = Date.now();
1176
+ return results.map((row) => {
1177
+ const cosine = row._distance != null ? 1 - row._distance : 0;
1178
+ const createdAt = row.created_at || "";
1179
+ const ageDays = createdAt ? (now - new Date(createdAt).getTime()) / 864e5 : 0;
1180
+ const weight = createdAt ? this.recencyWeight(ageDays) : 1;
1181
+ return {
1182
+ text: row.text,
1183
+ role: row.role,
1184
+ score: cosine * weight,
1185
+ source_type: row.source_type,
1186
+ source_id: row.source_id,
1187
+ agent_id: row.agent_id,
1188
+ created_at: createdAt,
1189
+ freshness: createdAt ? this.freshnessLabel(ageDays) : void 0
1190
+ };
1191
+ }).sort((a, b) => b.score - a.score).slice(0, limit);
1192
+ }
1193
+ // ── Remember (explicit fact storage) ──
1194
+ async remember(text, category = "fact") {
1195
+ const db = this.sqliteDb;
1196
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1197
+ const stmt = db.prepare(`
1198
+ INSERT INTO memories (text, category, confidence, source_ids, status, created_at, updated_at)
1199
+ VALUES (?, ?, 1.0, '[]', 'active', ?, ?)
1200
+ `);
1201
+ const result = stmt.run(text, category, now, now);
1202
+ await this.ingest([{
1203
+ text,
1204
+ role: "system",
1205
+ source_type: "manual",
1206
+ source_id: `memory:${result.lastInsertRowid}`,
1207
+ agent_id: "system",
1208
+ token_count: Math.ceil(text.length / 4),
1209
+ created_at: now
1210
+ }]);
1211
+ return result.lastInsertRowid;
1212
+ }
1213
+ // ── Forget (deprecate a memory) ──
1214
+ forget(memoryId) {
1215
+ const db = this.sqliteDb;
1216
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1217
+ const result = db.prepare(`
1218
+ UPDATE memories SET status = 'deprecated', updated_at = ? WHERE id = ? AND status = 'active'
1219
+ `).run(now, memoryId);
1220
+ return result.changes > 0;
1221
+ }
1222
+ // ── Status ──
1223
+ async status() {
1224
+ const db = this.sqliteDb;
1225
+ const sqliteChunks = db.prepare("SELECT COUNT(*) as count FROM chunks").get()?.count || 0;
1226
+ let lanceChunks = 0;
1227
+ if (this.chunksTable) {
1228
+ try {
1229
+ lanceChunks = await this.chunksTable.countRows();
1230
+ } catch {
1231
+ }
1232
+ }
1233
+ const chunks = Math.max(sqliteChunks, lanceChunks);
1234
+ const oldest = db.prepare("SELECT MIN(created_at) as ts FROM chunks").get()?.ts || null;
1235
+ const newest = db.prepare("SELECT MAX(created_at) as ts FROM chunks").get()?.ts || null;
1236
+ const memories = db.prepare("SELECT COUNT(*) as count FROM memories WHERE status = ?").get("active")?.count || 0;
1237
+ const sources = db.prepare("SELECT COUNT(*) as count FROM sources").get()?.count || 0;
1238
+ const chunkAgentRows = db.prepare("SELECT DISTINCT agent_id FROM chunks WHERE agent_id IS NOT NULL").all();
1239
+ const sourceAgentRows = db.prepare("SELECT DISTINCT agent_id FROM sources").all();
1240
+ const captureAgentRows = db.prepare("SELECT DISTINCT agent_id FROM capture_state").all();
1241
+ const agents = [.../* @__PURE__ */ new Set([
1242
+ ...chunkAgentRows.map((r) => r.agent_id),
1243
+ ...sourceAgentRows.map((r) => r.agent_id),
1244
+ ...captureAgentRows.map((r) => r.agent_id)
1245
+ ])];
1246
+ const captureInfo = db.prepare(
1247
+ "SELECT COUNT(*) as count, MAX(last_capture_at) as latest FROM capture_state"
1248
+ ).get();
1249
+ return {
1250
+ chunks,
1251
+ memories,
1252
+ sources,
1253
+ agents,
1254
+ oldestChunk: oldest,
1255
+ newestChunk: newest,
1256
+ embeddingProvider: this.config.embeddingProvider,
1257
+ dataDir: this.config.dataDir,
1258
+ capturedSessions: captureInfo?.count || 0,
1259
+ latestCapture: captureInfo?.latest || null
1260
+ };
1261
+ }
1262
+ // ── Capture State (for incremental ingestion) ──
1263
+ getCaptureState(agentId, sourceId) {
1264
+ const db = this.sqliteDb;
1265
+ const row = db.prepare("SELECT last_message_count, capture_count FROM capture_state WHERE agent_id = ? AND source_id = ?").get(agentId, sourceId);
1266
+ if (!row) return { lastMessageCount: 0, captureCount: 0 };
1267
+ return {
1268
+ lastMessageCount: row.last_message_count,
1269
+ captureCount: row.capture_count
1270
+ };
1271
+ }
1272
+ setCaptureState(agentId, sourceId, messageCount, captureCount) {
1273
+ const db = this.sqliteDb;
1274
+ db.prepare(`
1275
+ INSERT OR REPLACE INTO capture_state (agent_id, source_id, last_message_count, capture_count, last_capture_at)
1276
+ VALUES (?, ?, ?, ?, ?)
1277
+ `).run(agentId, sourceId, messageCount, captureCount, (/* @__PURE__ */ new Date()).toISOString());
1278
+ }
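The two capture-state helpers above are the hook for incremental ingestion: read the last processed message count, ingest only the tail of the transcript, then advance the watermark. A hedged sketch, where `messages`, `agentId`, and `sessionId` are assumed caller-side values and the chunk shape mirrors what remember() builds:

    const { lastMessageCount, captureCount } = crystal.getCaptureState(agentId, sessionId);
    const fresh = messages.slice(lastMessageCount);      // only messages not yet captured
    if (fresh.length > 0) {
      await crystal.ingest(fresh.map((m) => ({
        text: m.text,
        role: m.role,
        source_type: "session",                          // illustrative; not a value shown in this diff
        source_id: sessionId,
        agent_id: agentId,
        token_count: Math.ceil(m.text.length / 4),
        created_at: new Date().toISOString()
      })));
      crystal.setCaptureState(agentId, sessionId, messages.length, captureCount + 1);
    }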
1279
+ // ── Source File Indexing (optional feature) ──
1280
+ //
1281
+ // Add directories as "collections", sync to index/re-index changed files.
1282
+ // All source chunks get source_type='file' so they're searchable alongside
1283
+ // conversations and memories. Nothing here is required... you can use MC
1284
+ // without ever touching sources.
1285
+ // Default patterns for files worth indexing
1286
+ static DEFAULT_INCLUDE = [
1287
+ "**/*.ts",
1288
+ "**/*.js",
1289
+ "**/*.tsx",
1290
+ "**/*.jsx",
1291
+ "**/*.py",
1292
+ "**/*.rs",
1293
+ "**/*.go",
1294
+ "**/*.java",
1295
+ "**/*.md",
1296
+ "**/*.txt",
1297
+ "**/*.json",
1298
+ "**/*.yaml",
1299
+ "**/*.yml",
1300
+ "**/*.toml",
1301
+ "**/*.sh",
1302
+ "**/*.bash",
1303
+ "**/*.zsh",
1304
+ "**/*.css",
1305
+ "**/*.html",
1306
+ "**/*.svg",
1307
+ "**/*.sql",
1308
+ "**/*.graphql",
1309
+ "**/*.c",
1310
+ "**/*.cpp",
1311
+ "**/*.h",
1312
+ "**/*.hpp",
1313
+ "**/*.swift",
1314
+ "**/*.kt",
1315
+ "**/*.rb",
1316
+ "**/*.env.example",
1317
+ "**/*.gitignore",
1318
+ "**/Makefile",
1319
+ "**/Dockerfile",
1320
+ "**/Cargo.toml",
1321
+ "**/package.json",
1322
+ "**/tsconfig.json"
1323
+ ];
1324
+ static DEFAULT_IGNORE = [
1325
+ "**/node_modules/**",
1326
+ "**/.git/**",
1327
+ "**/dist/**",
1328
+ "**/build/**",
1329
+ "**/.next/**",
1330
+ "**/.cache/**",
1331
+ "**/coverage/**",
1332
+ "**/__pycache__/**",
1333
+ "**/target/**",
1334
+ "**/vendor/**",
1335
+ "**/.venv/**",
1336
+ "**/*.lock",
1337
+ "**/package-lock.json",
1338
+ "**/yarn.lock",
1339
+ "**/bun.lockb",
1340
+ "**/*.min.js",
1341
+ "**/*.min.css",
1342
+ "**/*.map",
1343
+ "**/*.png",
1344
+ "**/*.jpg",
1345
+ "**/*.jpeg",
1346
+ "**/*.gif",
1347
+ "**/*.ico",
1348
+ "**/*.webp",
1349
+ "**/*.woff",
1350
+ "**/*.woff2",
1351
+ "**/*.ttf",
1352
+ "**/*.eot",
1353
+ "**/*.mp3",
1354
+ "**/*.mp4",
1355
+ "**/*.wav",
1356
+ "**/*.ogg",
1357
+ "**/*.webm",
1358
+ "**/*.zip",
1359
+ "**/*.tar",
1360
+ "**/*.gz",
1361
+ "**/*.br",
1362
+ "**/*.sqlite",
1363
+ "**/*.db",
1364
+ "**/*.lance/**",
1365
+ "**/*.jsonl",
1366
+ "**/secrets/**",
1367
+ "**/.env"
1368
+ ];
1369
+ /** Add a directory as a source collection for indexing. */
1370
+ async sourcesAdd(rootPath, name, options) {
1371
+ const db = this.sqliteDb;
1372
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1373
+ const includePatterns = JSON.stringify(options?.include || _Crystal.DEFAULT_INCLUDE);
1374
+ const ignorePatterns = JSON.stringify(options?.ignore || _Crystal.DEFAULT_IGNORE);
1375
+ const existing = db.prepare("SELECT * FROM source_collections WHERE name = ?").get(name);
1376
+ if (existing) {
1377
+ throw new Error(`Collection "${name}" already exists. Use sourcesSync() to update it.`);
1378
+ }
1379
+ db.prepare(`
1380
+ INSERT INTO source_collections (name, root_path, glob_patterns, ignore_patterns, created_at)
1381
+ VALUES (?, ?, ?, ?, ?)
1382
+ `).run(name, rootPath, includePatterns, ignorePatterns, now);
1383
+ const row = db.prepare("SELECT * FROM source_collections WHERE name = ?").get(name);
1384
+ return row;
1385
+ }
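A sketch of registering a project directory through sourcesAdd() above, overriding the DEFAULT_INCLUDE/DEFAULT_IGNORE lists; the path and patterns are illustrative:

    const col = await crystal.sourcesAdd("/home/me/projects/api", "api", {
      include: ["**/*.ts", "**/*.md", "**/package.json"],       // narrower than DEFAULT_INCLUDE
      ignore: ["**/node_modules/**", "**/dist/**", "**/*.map"]  // stored as ignore_patterns JSON
    });
    console.log(`registered "${col.name}" rooted at ${col.root_path}`);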
1386
+ /** Remove a source collection and its file records. Chunks remain in LanceDB. */
1387
+ sourcesRemove(name) {
1388
+ const db = this.sqliteDb;
1389
+ const col = db.prepare("SELECT id FROM source_collections WHERE name = ?").get(name);
1390
+ if (!col) return false;
1391
+ db.prepare("DELETE FROM source_files WHERE collection_id = ?").run(col.id);
1392
+ db.prepare("DELETE FROM source_collections WHERE id = ?").run(col.id);
1393
+ return true;
1394
+ }
1395
+ /** Sync a collection: scan files, detect changes, re-index what changed. */
1396
+ async sourcesSync(name, options) {
1397
+ const db = this.sqliteDb;
1398
+ const startTime = Date.now();
1399
+ const batchSize = options?.batchSize || 20;
1400
+ const col = db.prepare("SELECT * FROM source_collections WHERE name = ?").get(name);
1401
+ if (!col) throw new Error(`Collection "${name}" not found. Add it first with sourcesAdd().`);
1402
+ const includePatterns = JSON.parse(col.glob_patterns);
1403
+ const ignorePatterns = JSON.parse(col.ignore_patterns);
1404
+ const files = this.scanDirectory(col.root_path, includePatterns, ignorePatterns);
1405
+ const existingFiles = /* @__PURE__ */ new Map();
1406
+ const rows = db.prepare("SELECT id, file_path, file_hash FROM source_files WHERE collection_id = ?").all(col.id);
1407
+ for (const row of rows) {
1408
+ existingFiles.set(row.file_path, { id: row.id, file_hash: row.file_hash });
1409
+ }
1410
+ let added = 0;
1411
+ let updated = 0;
1412
+ let removed = 0;
1413
+ let chunksAdded = 0;
1414
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1415
+ const toIndex = [];
1416
+ for (const absPath of files) {
1417
+ const relPath = relative(col.root_path, absPath);
1418
+ let content;
1419
+ try {
1420
+ content = readFileSync2(absPath, "utf-8");
1421
+ } catch {
1422
+ continue;
1423
+ }
1424
+ const stat = statSync(absPath);
1425
+ if (stat.size > 500 * 1024) continue;
1426
+ const hash = createHash("sha256").update(content).digest("hex");
1427
+ const existing = existingFiles.get(relPath);
1428
+ if (existing) {
1429
+ existingFiles.delete(relPath);
1430
+ if (existing.file_hash === hash) continue;
1431
+ toIndex.push({ relPath, absPath, hash, size: stat.size, isUpdate: true });
1432
+ } else {
1433
+ toIndex.push({ relPath, absPath, hash, size: stat.size, isUpdate: false });
1434
+ }
1435
+ }
1436
+ if (options?.dryRun) {
1437
+ const newFiles = toIndex.filter((f) => !f.isUpdate).length;
1438
+ const updatedFiles = toIndex.filter((f) => f.isUpdate).length;
1439
+ return {
1440
+ collection: name,
1441
+ added: newFiles,
1442
+ updated: updatedFiles,
1443
+ removed: existingFiles.size,
1444
+ chunks_added: 0,
1445
+ duration_ms: Date.now() - startTime
1446
+ };
1447
+ }
1448
+ for (let i = 0; i < toIndex.length; i += batchSize) {
1449
+ const batch = toIndex.slice(i, i + batchSize);
1450
+ const allChunks = [];
1451
+ for (const file of batch) {
1452
+ const content = readFileSync2(file.absPath, "utf-8");
1453
+ const ext = extname(file.absPath);
1454
+ const fileName = basename(file.absPath);
1455
+ const header = `File: ${file.relPath}
1456
+
1457
+ `;
1458
+ const textChunks = this.chunkText(header + content, 400, 80);
1459
+ const fileChunks = textChunks.map((text) => ({
1460
+ text,
1461
+ role: "system",
1462
+ source_type: "file",
1463
+ source_id: `file:${name}:${file.relPath}`,
1464
+ agent_id: "system",
1465
+ token_count: Math.ceil(text.length / 4),
1466
+ created_at: now
1467
+ }));
1468
+ allChunks.push(...fileChunks);
1469
+ if (file.isUpdate) {
1470
+ db.prepare(`
1471
+ UPDATE source_files SET file_hash = ?, file_size = ?, chunk_count = ?, last_indexed_at = ?
1472
+ WHERE collection_id = ? AND file_path = ?
1473
+ `).run(file.hash, file.size, fileChunks.length, now, col.id, file.relPath);
1474
+ updated++;
1475
+ } else {
1476
+ db.prepare(`
1477
+ INSERT INTO source_files (collection_id, file_path, file_hash, file_size, chunk_count, last_indexed_at)
1478
+ VALUES (?, ?, ?, ?, ?, ?)
1479
+ `).run(col.id, file.relPath, file.hash, file.size, fileChunks.length, now);
1480
+ added++;
1481
+ }
1482
+ }
1483
+ if (allChunks.length > 0) {
1484
+ const ingested = await this.ingest(allChunks);
1485
+ chunksAdded += ingested;
1486
+ }
1487
+ }
1488
+ for (const [relPath, { id }] of existingFiles) {
1489
+ db.prepare("DELETE FROM source_files WHERE id = ?").run(id);
1490
+ removed++;
1491
+ }
1492
+ const fileCount = db.prepare("SELECT COUNT(*) as count FROM source_files WHERE collection_id = ?").get(col.id).count;
1493
+ const chunkCount = db.prepare("SELECT SUM(chunk_count) as total FROM source_files WHERE collection_id = ?").get(col.id).total || 0;
1494
+ db.prepare("UPDATE source_collections SET file_count = ?, chunk_count = ?, last_sync_at = ? WHERE id = ?").run(fileCount, chunkCount, now, col.id);
1495
+ return {
1496
+ collection: name,
1497
+ added,
1498
+ updated,
1499
+ removed,
1500
+ chunks_added: chunksAdded,
1501
+ duration_ms: Date.now() - startTime
1502
+ };
1503
+ }
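How a caller might drive the sync above: a dry run first to preview the change set, then a real pass; both return the same summary shape:

    const preview = await crystal.sourcesSync("api", { dryRun: true });
    console.log(`would add ${preview.added}, update ${preview.updated}, remove ${preview.removed}`);
    const result = await crystal.sourcesSync("api", { batchSize: 10 });
    console.log(`indexed ${result.chunks_added} chunks across ${result.added + result.updated} files in ${result.duration_ms}ms`);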
1504
+ /** Get status of all source collections. */
1505
+ sourcesStatus() {
1506
+ const db = this.sqliteDb;
1507
+ const collections = db.prepare("SELECT name, root_path, file_count, chunk_count, last_sync_at FROM source_collections").all();
1508
+ const totalFiles = collections.reduce((sum, c) => sum + c.file_count, 0);
1509
+ const totalChunks = collections.reduce((sum, c) => sum + c.chunk_count, 0);
1510
+ return {
1511
+ collections: collections.map((c) => ({
1512
+ name: c.name,
1513
+ root_path: c.root_path,
1514
+ file_count: c.file_count,
1515
+ chunk_count: c.chunk_count,
1516
+ last_sync_at: c.last_sync_at
1517
+ })),
1518
+ total_files: totalFiles,
1519
+ total_chunks: totalChunks
1520
+ };
1521
+ }
1522
+ /** Scan a directory recursively, matching include/ignore patterns. */
1523
+ scanDirectory(rootPath, includePatterns, ignorePatterns) {
1524
+ const results = [];
1525
+ const allowedExtensions = /* @__PURE__ */ new Set();
1526
+ const allowedExactNames = /* @__PURE__ */ new Set();
1527
+ for (const pattern of includePatterns) {
1528
+ const extMatch = pattern.match(/\*\*\/\*(\.\w+)$/);
1529
+ if (extMatch) {
1530
+ allowedExtensions.add(extMatch[1]);
1531
+ }
1532
+ const nameMatch = pattern.match(/\*\*\/([^*]+)$/);
1533
+ if (nameMatch && !nameMatch[1].startsWith("*.")) {
1534
+ allowedExactNames.add(nameMatch[1]);
1535
+ }
1536
+ }
1537
+ const ignoreDirs = /* @__PURE__ */ new Set();
1538
+ for (const pattern of ignorePatterns) {
1539
+ const dirMatch = pattern.match(/\*\*\/([^/*]+)\/\*\*$/);
1540
+ if (dirMatch) {
1541
+ ignoreDirs.add(dirMatch[1]);
1542
+ }
1543
+ }
1544
+ const ignoreFiles = /* @__PURE__ */ new Set();
1545
+ for (const pattern of ignorePatterns) {
1546
+ const fileMatch = pattern.match(/\*\*\/\*(\.\w+)$/);
1547
+ if (fileMatch) {
1548
+ ignoreFiles.add(fileMatch[1]);
1549
+ }
1550
+ const exactMatch = pattern.match(/\*\*\/([^*]+)$/);
1551
+ if (exactMatch && !exactMatch[1].includes("/")) {
1552
+ ignoreFiles.add(exactMatch[1]);
1553
+ }
1554
+ }
1555
+ const walk = (dir) => {
1556
+ let entries;
1557
+ try {
1558
+ entries = readdirSync(dir);
1559
+ } catch {
1560
+ return;
1561
+ }
1562
+ for (const entry of entries) {
1563
+ const fullPath = join2(dir, entry);
1564
+ let stat;
1565
+ try {
1566
+ stat = statSync(fullPath);
1567
+ } catch {
1568
+ continue;
1569
+ }
1570
+ if (stat.isDirectory()) {
1571
+ if (ignoreDirs.has(entry)) continue;
1572
+ if (entry.startsWith(".")) continue;
1573
+ walk(fullPath);
1574
+ } else if (stat.isFile()) {
1575
+ const ext = extname(entry);
1576
+ if (ignoreFiles.has(ext)) continue;
1577
+ if (ignoreFiles.has(entry)) continue;
1578
+ if (allowedExtensions.has(ext) || allowedExactNames.has(entry)) {
1579
+ results.push(fullPath);
1580
+ }
1581
+ }
1582
+ }
1583
+ };
1584
+ walk(rootPath);
1585
+ return results;
1586
+ }
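scanDirectory() above does not run a full glob engine; it reduces the include/ignore patterns to extension, exact-name, and directory-name sets via three regexes. A small demonstration of how the default patterns map under that scheme (the .min.js observation is an inference from the regexes, not documented behavior):

    "**/*.ts".match(/\*\*\/\*(\.\w+)$/);                  // captures ".ts"  -> allowedExtensions
    "**/Makefile".match(/\*\*\/([^*]+)$/);                // captures "Makefile" -> allowedExactNames
    "**/node_modules/**".match(/\*\*\/([^/*]+)\/\*\*$/);  // captures "node_modules" -> ignoreDirs
    // "**/*.min.js" matches neither file regex (a second dot follows the wildcard), so it falls
    // through this simplified matcher; such files are still picked up via the ".js" extension.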
1587
+ // ── Orphan Cleanup ──
1588
+ /** Clean orphaned entries in chunks_vec and chunks_fts that no longer have
1589
+ * corresponding rows in the chunks table. Returns counts of what was found/cleaned. */
1590
+ cleanOrphans(options) {
1591
+ const db = this.sqliteDb;
1592
+ const dryRun = options?.dryRun ?? false;
1593
+ const orphanedVec = db.prepare(
1594
+ "SELECT COUNT(*) as cnt FROM chunks_vec WHERE chunk_id NOT IN (SELECT id FROM chunks)"
1595
+ ).get().cnt;
1596
+ const orphanedFts = db.prepare(
1597
+ "SELECT COUNT(*) as cnt FROM chunks_fts WHERE rowid NOT IN (SELECT id FROM chunks)"
1598
+ ).get().cnt;
1599
+ if (dryRun) {
1600
+ return { orphanedVec, orphanedFts, cleanedVec: 0, cleanedFts: 0, dryRun: true };
1601
+ }
1602
+ let cleanedVec = 0;
1603
+ if (orphanedVec > 0) {
1604
+ const ids = db.prepare(
1605
+ "SELECT chunk_id FROM chunks_vec WHERE chunk_id NOT IN (SELECT id FROM chunks)"
1606
+ ).all();
1607
+ const del = db.prepare("DELETE FROM chunks_vec WHERE chunk_id = ?");
1608
+ const BATCH = 1e3;
1609
+ for (let i = 0; i < ids.length; i += BATCH) {
1610
+ const batch = ids.slice(i, i + BATCH);
1611
+ db.transaction(() => {
1612
+ for (const r of batch) {
1613
+ del.run(r.chunk_id);
1614
+ cleanedVec++;
1615
+ }
1616
+ })();
1617
+ }
1618
+ }
1619
+ let cleanedFts = 0;
1620
+ if (orphanedFts > 0) {
1621
+ db.exec("DELETE FROM chunks_fts");
1622
+ db.exec("INSERT INTO chunks_fts(rowid, text) SELECT id, text FROM chunks");
1623
+ cleanedFts = orphanedFts;
1624
+ }
1625
+ return { orphanedVec, orphanedFts, cleanedVec, cleanedFts, dryRun: false };
1626
+ }
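A sketch of running the orphan cleanup above, checking the counts with a dry run before deleting anything:

    const report = crystal.cleanOrphans({ dryRun: true });
    if (report.orphanedVec > 0 || report.orphanedFts > 0) {
      const cleaned = crystal.cleanOrphans();
      console.log(`deleted ${cleaned.cleanedVec} stale vector rows, rebuilt FTS over ${cleaned.cleanedFts} orphans`);
    }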
1627
+ // ── Cleanup ──
1628
+ close() {
1629
+ this.sqliteDb?.close();
1630
+ }
1631
+ };
1632
+ function resolveConfig(overrides) {
1633
+ const HOME3 = process.env.HOME || "";
1634
+ const ldmMemory = join2(HOME3, ".ldm", "memory");
1635
+ let dataDir = overrides?.dataDir || process.env.CRYSTAL_DATA_DIR;
1636
+ if (!dataDir) {
1637
+ if (existsSync2(join2(ldmMemory, "crystal.db"))) {
1638
+ dataDir = ldmMemory;
1639
+ } else {
1640
+ const legacyDir = join2(HOME3, ".openclaw", "memory-crystal");
1641
+ if (existsSync2(join2(legacyDir, "crystal.db"))) {
1642
+ dataDir = legacyDir;
1643
+ } else {
1644
+ dataDir = ldmMemory;
1645
+ }
1646
+ }
1647
+ }
1648
+ loadEnvFile(join2(dataDir, ".env"));
1649
+ const openaiApiKey = overrides?.openaiApiKey || process.env.OPENAI_API_KEY || opRead("OpenAI API", "api key");
1650
+ const googleApiKey = overrides?.googleApiKey || process.env.GOOGLE_API_KEY || opRead("Google AI", "api key");
1651
+ const remoteToken = overrides?.remoteToken || process.env.CRYSTAL_REMOTE_TOKEN || opRead("Memory Crystal Remote", "token");
1652
+ return {
1653
+ dataDir,
1654
+ embeddingProvider: overrides?.embeddingProvider || process.env.CRYSTAL_EMBEDDING_PROVIDER || "openai",
1655
+ openaiApiKey,
1656
+ openaiModel: overrides?.openaiModel || process.env.CRYSTAL_OPENAI_MODEL || "text-embedding-3-small",
1657
+ ollamaHost: overrides?.ollamaHost || process.env.CRYSTAL_OLLAMA_HOST || "http://localhost:11434",
1658
+ ollamaModel: overrides?.ollamaModel || process.env.CRYSTAL_OLLAMA_MODEL || "nomic-embed-text",
1659
+ googleApiKey,
1660
+ googleModel: overrides?.googleModel || process.env.CRYSTAL_GOOGLE_MODEL || "text-embedding-004",
1661
+ remoteUrl: overrides?.remoteUrl || process.env.CRYSTAL_REMOTE_URL,
1662
+ remoteToken
1663
+ };
1664
+ }
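resolveConfig() above layers its sources: explicit overrides win, then process.env, then a .env file in the data dir (which only fills variables that are still unset), with opRead() as a last resort for API keys and the remote token. A sketch of configuring it purely through environment variables; the "ollama" provider string is inferred from the ollamaHost/ollamaModel fields and may not be the exact accepted value:

    process.env.CRYSTAL_DATA_DIR = "/home/me/.ldm/memory";   // skips the legacy-path probing
    process.env.CRYSTAL_EMBEDDING_PROVIDER = "ollama";       // assumed provider id (default is "openai")
    process.env.CRYSTAL_OLLAMA_MODEL = "nomic-embed-text";
    const config = resolveConfig();
    console.log(config.dataDir, config.embeddingProvider, config.ollamaModel);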
1665
+ function loadEnvFile(path) {
1666
+ if (!existsSync2(path)) return;
1667
+ const content = readFileSync2(path, "utf8");
1668
+ for (const line of content.split("\n")) {
1669
+ const trimmed = line.trim();
1670
+ if (!trimmed || trimmed.startsWith("#")) continue;
1671
+ const eqIdx = trimmed.indexOf("=");
1672
+ if (eqIdx === -1) continue;
1673
+ const key = trimmed.slice(0, eqIdx).trim();
1674
+ let value = trimmed.slice(eqIdx + 1).trim();
1675
+ if (value.startsWith('"') && value.endsWith('"') || value.startsWith("'") && value.endsWith("'")) {
1676
+ value = value.slice(1, -1);
1677
+ }
1678
+ if (key && !process.env[key]) {
1679
+ process.env[key] = value;
1680
+ }
1681
+ }
1682
+ }
1683
+ function opRead(item, field) {
1684
+ try {
1685
+ const HOME3 = process.env.HOME || "";
1686
+ let saTokenPath = join2(HOME3, ".ldm", "secrets", "op-sa-token");
1687
+ if (!existsSync2(saTokenPath)) {
1688
+ saTokenPath = join2(HOME3, ".openclaw", "secrets", "op-sa-token");
1689
+ }
1690
+ if (!existsSync2(saTokenPath)) return void 0;
1691
+ const saToken = readFileSync2(saTokenPath, "utf8").trim();
1692
+ return execSync2(`op read "op://Agent Secrets/${item}/${field}" 2>/dev/null`, {
1693
+ encoding: "utf8",
1694
+ env: { ...process.env, OP_SERVICE_ACCOUNT_TOKEN: saToken },
1695
+ timeout: 1e4
1696
+ }).trim() || void 0;
1697
+ } catch {
1698
+ return void 0;
1699
+ }
1700
+ }
1701
+
1702
+ // src/dev-update.ts
1703
+ import { execSync as execSync4 } from "child_process";
1704
+ import { existsSync as existsSync4, mkdirSync as mkdirSync3, writeFileSync as writeFileSync2, readFileSync as readFileSync4 } from "fs";
1705
+ import { join as join4, basename as basename2 } from "path";
1706
+
1707
+ // src/ldm.ts
1708
+ import { existsSync as existsSync3, mkdirSync as mkdirSync2, readFileSync as readFileSync3, writeFileSync, copyFileSync, chmodSync, readdirSync as readdirSync2 } from "fs";
1709
+ import { join as join3, dirname } from "path";
1710
+ import { execSync as execSync3 } from "child_process";
1711
+ import { fileURLToPath } from "url";
15
1712
  var HOME = process.env.HOME || "";
1713
+ var LDM_ROOT = join3(HOME, ".ldm");
1714
+ function loadAgentConfig(id) {
1715
+ const cfgPath = join3(LDM_ROOT, "agents", id, "config.json");
1716
+ try {
1717
+ if (existsSync3(cfgPath)) return JSON.parse(readFileSync3(cfgPath, "utf-8"));
1718
+ } catch {
1719
+ }
1720
+ return null;
1721
+ }
1722
+ function getAgentId(harnessHint) {
1723
+ if (process.env.CRYSTAL_AGENT_ID) return process.env.CRYSTAL_AGENT_ID;
1724
+ const agentsDir = join3(LDM_ROOT, "agents");
1725
+ if (existsSync3(agentsDir)) {
1726
+ try {
1727
+ for (const d of readdirSync2(agentsDir)) {
1728
+ const cfg = loadAgentConfig(d);
1729
+ if (!cfg || !cfg.agentId) continue;
1730
+ if (!harnessHint) return cfg.agentId;
1731
+ if (harnessHint === "claude-code" && cfg.harness === "claude-code-cli") return cfg.agentId;
1732
+ if (harnessHint === "openclaw" && cfg.harness === "openclaw") return cfg.agentId;
1733
+ }
1734
+ } catch {
1735
+ }
1736
+ }
1737
+ return harnessHint === "openclaw" ? "oc-lesa-mini" : "cc-mini";
1738
+ }
1739
+ function ldmPaths(agentId) {
1740
+ const id = agentId || getAgentId();
1741
+ const agentRoot = join3(LDM_ROOT, "agents", id);
1742
+ return {
1743
+ root: LDM_ROOT,
1744
+ bin: join3(LDM_ROOT, "bin"),
1745
+ secrets: join3(LDM_ROOT, "secrets"),
1746
+ state: join3(LDM_ROOT, "state"),
1747
+ config: join3(LDM_ROOT, "config.json"),
1748
+ crystalDb: join3(LDM_ROOT, "memory", "crystal.db"),
1749
+ crystalLance: join3(LDM_ROOT, "memory", "lance"),
1750
+ agentRoot,
1751
+ transcripts: join3(agentRoot, "memory", "transcripts"),
1752
+ sessions: join3(agentRoot, "memory", "sessions"),
1753
+ daily: join3(agentRoot, "memory", "daily"),
1754
+ journals: join3(agentRoot, "memory", "journals"),
1755
+ workspace: join3(agentRoot, "memory", "workspace")
1756
+ };
1757
+ }
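For orientation, the directory layout that ldmPaths() above resolves, shown with an illustrative agent id ("cc-mini" is one of the defaults used by getAgentId() above):

    const p = ldmPaths("cc-mini");
    // p.crystalDb    -> ~/.ldm/memory/crystal.db
    // p.crystalLance -> ~/.ldm/memory/lance
    // p.secrets      -> ~/.ldm/secrets,  p.state -> ~/.ldm/state
    // p.transcripts  -> ~/.ldm/agents/cc-mini/memory/transcripts
    // p.workspace    -> ~/.ldm/agents/cc-mini/memory/workspace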
1758
+ function loadConfig() {
1759
+ const configPath = join3(LDM_ROOT, "config.json");
1760
+ try {
1761
+ if (existsSync3(configPath)) {
1762
+ return JSON.parse(readFileSync3(configPath, "utf-8"));
1763
+ }
1764
+ } catch {
1765
+ }
1766
+ return null;
1767
+ }
1768
+ function saveConfig(config) {
1769
+ const configPath = join3(LDM_ROOT, "config.json");
1770
+ writeFileSync(configPath, JSON.stringify(config, null, 2) + "\n");
1771
+ }
1772
+ function scaffoldLdm(agentId) {
1773
+ const paths = ldmPaths(agentId);
1774
+ mkdirSync2(join3(paths.root, "memory"), { recursive: true });
1775
+ mkdirSync2(paths.crystalLance, { recursive: true });
1776
+ mkdirSync2(paths.bin, { recursive: true });
1777
+ mkdirSync2(paths.secrets, { recursive: true, mode: 448 });
1778
+ mkdirSync2(paths.state, { recursive: true });
1779
+ mkdirSync2(paths.transcripts, { recursive: true });
1780
+ mkdirSync2(paths.sessions, { recursive: true });
1781
+ mkdirSync2(paths.daily, { recursive: true });
1782
+ mkdirSync2(paths.journals, { recursive: true });
1783
+ mkdirSync2(paths.workspace, { recursive: true });
1784
+ const id = agentId || getAgentId();
1785
+ let config = loadConfig();
1786
+ if (!config) {
1787
+ config = {
1788
+ version: "1.0.0",
1789
+ agents: [id],
1790
+ createdAt: (/* @__PURE__ */ new Date()).toISOString(),
1791
+ updatedAt: (/* @__PURE__ */ new Date()).toISOString()
1792
+ };
1793
+ } else {
1794
+ if (!config.agents.includes(id)) {
1795
+ config.agents.push(id);
1796
+ }
1797
+ config.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
1798
+ }
1799
+ saveConfig(config);
1800
+ return paths;
1801
+ }
1802
+ var LEGACY_OC_DIR = join3(HOME, ".openclaw");
1803
+ function resolveStatePath(filename) {
1804
+ const paths = ldmPaths();
1805
+ const ldmPath = join3(paths.state, filename);
1806
+ if (existsSync3(ldmPath)) return ldmPath;
1807
+ const legacyPath = join3(LEGACY_OC_DIR, "memory", filename);
1808
+ if (existsSync3(legacyPath)) return legacyPath;
1809
+ return ldmPath;
1810
+ }
1811
+ function stateWritePath(filename) {
1812
+ const paths = ldmPaths();
1813
+ const dir = paths.state;
1814
+ if (!existsSync3(dir)) mkdirSync2(dir, { recursive: true });
1815
+ return join3(dir, filename);
1816
+ }
1817
+ function ensureLdm(agentId) {
1818
+ const paths = ldmPaths(agentId);
1819
+ if (existsSync3(paths.transcripts) && existsSync3(paths.config)) {
1820
+ return paths;
1821
+ }
1822
+ return scaffoldLdm(agentId);
1823
+ }
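A sketch tying the agent-id resolution and scaffolding helpers above together; the harness hint is illustrative:

    // Resolution order in getAgentId(): CRYSTAL_AGENT_ID env var, then the first config under
    // ~/.ldm/agents whose harness matches the hint, then a hard-coded default ("cc-mini" here).
    const id = getAgentId("claude-code");
    const paths = ensureLdm(id);   // reuses an existing ~/.ldm tree, scaffolds it on first run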
1824
+
1825
+ // src/dev-update.ts
1826
+ var HOME2 = process.env.HOME || "";
16
1827
  function resolveWorkspace() {
17
- const configPath = join(HOME, ".ldm", "config.json");
18
- if (existsSync(configPath)) {
1828
+ const configPath = join4(HOME2, ".ldm", "config.json");
1829
+ if (existsSync4(configPath)) {
19
1830
  try {
20
- const config = JSON.parse(readFileSync(configPath, "utf-8"));
1831
+ const config = JSON.parse(readFileSync4(configPath, "utf-8"));
21
1832
  if (config.workspace) return config.workspace;
22
1833
  } catch {
23
1834
  }
24
1835
  }
25
- return join(HOME, "wipcomputerinc");
1836
+ return join4(HOME2, "wipcomputerinc");
26
1837
  }
27
- var TEAM_DIR = join(resolveWorkspace(), "team");
28
- var CC_REPOS = join(TEAM_DIR, "cc-mini", "repos");
29
- var LESA_REPOS = join(TEAM_DIR, "L\u0113sa", "repos");
30
- var DEV_UPDATES_DIR = join(CC_REPOS, "wip-dev-updates");
1838
+ var TEAM_DIR = join4(resolveWorkspace(), "team");
1839
+ var CC_REPOS = join4(TEAM_DIR, "cc-mini", "repos");
1840
+ var LESA_REPOS = join4(TEAM_DIR, "L\u0113sa", "repos");
1841
+ var DEV_UPDATES_DIR = join4(CC_REPOS, "wip-dev-updates");
31
1842
  var LAST_RUN_PATH = resolveStatePath("dev-update-last-run.json");
32
1843
  function loadLastRun() {
33
1844
  try {
34
- if (existsSync(LAST_RUN_PATH)) {
35
- return JSON.parse(readFileSync(LAST_RUN_PATH, "utf-8"));
1845
+ if (existsSync4(LAST_RUN_PATH)) {
1846
+ return JSON.parse(readFileSync4(LAST_RUN_PATH, "utf-8"));
36
1847
  }
37
1848
  } catch {
38
1849
  }
@@ -40,11 +1851,11 @@ function loadLastRun() {
40
1851
  }
41
1852
  function saveLastRun(run) {
42
1853
  const writePath = stateWritePath("dev-update-last-run.json");
43
- writeFileSync(writePath, JSON.stringify(run, null, 2));
1854
+ writeFileSync2(writePath, JSON.stringify(run, null, 2));
44
1855
  }
45
1856
  function git(repoPath, cmd) {
46
1857
  try {
47
- return execSync(`git -C "${repoPath}" ${cmd}`, {
1858
+ return execSync4(`git -C "${repoPath}" ${cmd}`, {
48
1859
  encoding: "utf-8",
49
1860
  timeout: 1e4,
50
1861
  stdio: ["pipe", "pipe", "pipe"]
@@ -54,8 +1865,8 @@ function git(repoPath, cmd) {
54
1865
  }
55
1866
  }
56
1867
  function scanRepo(repoPath, since) {
57
- if (!existsSync(join(repoPath, ".git"))) return null;
58
- const name = basename(repoPath);
1868
+ if (!existsSync4(join4(repoPath, ".git"))) return null;
1869
+ const name = basename2(repoPath);
59
1870
  if (name === "_third-party-repos" || name === "wip-dev-updates") return null;
60
1871
  const recentCommits = git(repoPath, `log --oneline --since="${since}"`);
61
1872
  const uncommitted = git(repoPath, "status --porcelain");
@@ -122,32 +1933,32 @@ function runDevUpdate(author) {
122
1933
  const files = [];
123
1934
  const repoDirs = [CC_REPOS, LESA_REPOS];
124
1935
  for (const parentDir of repoDirs) {
125
- if (!existsSync(parentDir)) continue;
1936
+ if (!existsSync4(parentDir)) continue;
126
1937
  let entries;
127
1938
  try {
128
- entries = execSync(`ls "${parentDir}"`, { encoding: "utf-8" }).trim().split("\n");
1939
+ entries = execSync4(`ls "${parentDir}"`, { encoding: "utf-8" }).trim().split("\n");
129
1940
  } catch {
130
1941
  continue;
131
1942
  }
132
1943
  for (const entry of entries) {
133
- const repoPath = join(parentDir, entry);
1944
+ const repoPath = join4(parentDir, entry);
134
1945
  const content = scanRepo(repoPath, since);
135
1946
  if (!content) continue;
136
- const repoName = basename(repoPath);
137
- const outDir = join(repoPath, "ai");
138
- const outFile = join(outDir, `${now.toISOString().slice(0, 10)}--${now.toISOString().slice(11, 19).replace(/:/g, "-")}--${author}--dev-update-${repoName}.md`);
139
- mkdirSync(outDir, { recursive: true });
1947
+ const repoName = basename2(repoPath);
1948
+ const outDir = join4(repoPath, "ai");
1949
+ const outFile = join4(outDir, `${now.toISOString().slice(0, 10)}--${now.toISOString().slice(11, 19).replace(/:/g, "-")}--${author}--dev-update-${repoName}.md`);
1950
+ mkdirSync3(outDir, { recursive: true });
140
1951
  const header = `*Auto-generated dev update by ${author} at ${now.toISOString().slice(0, 16).replace("T", " ")}*
141
1952
 
142
1953
  `;
143
- writeFileSync(outFile, content.replace(/^# .+\n/, `$&
1954
+ writeFileSync2(outFile, content.replace(/^# .+\n/, `$&
144
1955
  ${header}`));
145
- files.push(`${repoName}/ai/${basename(outFile)}`);
1956
+ files.push(`${repoName}/ai/${basename2(outFile)}`);
146
1957
  }
147
1958
  }
148
1959
  if (false) {
149
1960
  try {
150
- execSync(
1961
+ execSync4(
151
1962
  `cd "${DEV_UPDATES_DIR}" && git add -A && git commit -m "${author} auto-dev-update ${ts}: ${files.length} repo(s)" --no-verify && git push --quiet`,
152
1963
  { encoding: "utf-8", timeout: 3e4, stdio: "pipe" }
153
1964
  );
@@ -164,20 +1975,20 @@ ${header}`));
164
1975
 
165
1976
  // src/openclaw.ts
166
1977
  import {
167
- existsSync as existsSync2,
168
- readFileSync as readFileSync2,
169
- writeFileSync as writeFileSync2,
170
- readdirSync,
171
- copyFileSync,
172
- statSync,
173
- mkdirSync as mkdirSync2
1978
+ existsSync as existsSync5,
1979
+ readFileSync as readFileSync5,
1980
+ writeFileSync as writeFileSync3,
1981
+ readdirSync as readdirSync3,
1982
+ copyFileSync as copyFileSync2,
1983
+ statSync as statSync2,
1984
+ mkdirSync as mkdirSync4
174
1985
  } from "fs";
175
- import { join as join2, basename as basename2 } from "path";
1986
+ import { join as join5, basename as basename3 } from "path";
176
1987
  var PRIVATE_MODE_PATH = resolveStatePath("memory-capture-state.json");
177
1988
  function isPrivateMode() {
178
1989
  try {
179
- if (existsSync2(PRIVATE_MODE_PATH)) {
180
- const state = JSON.parse(readFileSync2(PRIVATE_MODE_PATH, "utf-8"));
1990
+ if (existsSync5(PRIVATE_MODE_PATH)) {
1991
+ const state = JSON.parse(readFileSync5(PRIVATE_MODE_PATH, "utf-8"));
181
1992
  return state.enabled === false;
182
1993
  }
183
1994
  } catch {
@@ -188,30 +1999,30 @@ var OC_AGENT_ID = "oc-lesa-mini";
188
1999
  function syncRawDataToLdm(logger) {
189
2000
  try {
190
2001
  const paths = ensureLdm(OC_AGENT_ID);
191
- const HOME2 = process.env.HOME || "";
192
- const ocDir = join2(HOME2, ".openclaw");
193
- const sessionsDir = join2(ocDir, "agents", "main", "sessions");
194
- if (existsSync2(sessionsDir)) {
2002
+ const HOME3 = process.env.HOME || "";
2003
+ const ocDir = join5(HOME3, ".openclaw");
2004
+ const sessionsDir = join5(ocDir, "agents", "main", "sessions");
2005
+ if (existsSync5(sessionsDir)) {
195
2006
  let copied = 0;
196
- for (const file of readdirSync(sessionsDir)) {
2007
+ for (const file of readdirSync3(sessionsDir)) {
197
2008
  if (!file.endsWith(".jsonl")) continue;
198
- const src = join2(sessionsDir, file);
199
- const dest = join2(paths.transcripts, file);
2009
+ const src = join5(sessionsDir, file);
2010
+ const dest = join5(paths.transcripts, file);
200
2011
  if (idempotentCopy(src, dest)) copied++;
201
2012
  }
202
2013
  if (copied > 0) logger.info(`memory-crystal: synced ${copied} session files to LDM`);
203
2014
  }
204
- const workspaceDir = join2(ocDir, "workspace");
205
- if (existsSync2(workspaceDir)) {
2015
+ const workspaceDir = join5(ocDir, "workspace");
2016
+ if (existsSync5(workspaceDir)) {
206
2017
  syncDirRecursive(workspaceDir, paths.workspace, ".md");
207
2018
  }
208
- const dailyDir = join2(ocDir, "workspace", "memory");
209
- if (existsSync2(dailyDir)) {
210
- for (const file of readdirSync(dailyDir)) {
2019
+ const dailyDir = join5(ocDir, "workspace", "memory");
2020
+ if (existsSync5(dailyDir)) {
2021
+ for (const file of readdirSync3(dailyDir)) {
211
2022
  if (!file.endsWith(".md")) continue;
212
2023
  if (/^\d{4}-\d{2}-\d{2}\.md$/.test(file)) {
213
- const src = join2(dailyDir, file);
214
- const dest = join2(paths.daily, file);
2024
+ const src = join5(dailyDir, file);
2025
+ const dest = join5(paths.daily, file);
215
2026
  idempotentCopy(src, dest);
216
2027
  }
217
2028
  }
@@ -222,24 +2033,24 @@ function syncRawDataToLdm(logger) {
222
2033
  }
223
2034
  function idempotentCopy(src, dest) {
224
2035
  try {
225
- if (existsSync2(dest)) {
226
- const srcStat = statSync(src);
227
- const destStat = statSync(dest);
2036
+ if (existsSync5(dest)) {
2037
+ const srcStat = statSync2(src);
2038
+ const destStat = statSync2(dest);
228
2039
  if (srcStat.size === destStat.size && srcStat.mtimeMs <= destStat.mtimeMs) return false;
229
2040
  }
230
- const destDir = join2(dest, "..");
231
- if (!existsSync2(destDir)) mkdirSync2(destDir, { recursive: true });
232
- copyFileSync(src, dest);
2041
+ const destDir = join5(dest, "..");
2042
+ if (!existsSync5(destDir)) mkdirSync4(destDir, { recursive: true });
2043
+ copyFileSync2(src, dest);
233
2044
  return true;
234
2045
  } catch {
235
2046
  return false;
236
2047
  }
237
2048
  }
238
2049
  function syncDirRecursive(srcDir, destDir, ext) {
239
- if (!existsSync2(destDir)) mkdirSync2(destDir, { recursive: true });
240
- for (const entry of readdirSync(srcDir, { withFileTypes: true })) {
241
- const srcPath = join2(srcDir, entry.name);
242
- const destPath = join2(destDir, entry.name);
2050
+ if (!existsSync5(destDir)) mkdirSync4(destDir, { recursive: true });
2051
+ for (const entry of readdirSync3(srcDir, { withFileTypes: true })) {
2052
+ const srcPath = join5(srcDir, entry.name);
2053
+ const destPath = join5(destDir, entry.name);
243
2054
  if (entry.isDirectory()) {
244
2055
  syncDirRecursive(srcPath, destPath, ext);
245
2056
  } else if (entry.name.endsWith(ext)) {
@@ -251,8 +2062,8 @@ var WORKSPACE_WATERMARK_FILE = "workspace-memory-watermarks.json";
251
2062
  function loadWatermarks() {
252
2063
  try {
253
2064
  const path = resolveStatePath(WORKSPACE_WATERMARK_FILE);
254
- if (existsSync2(path)) {
255
- return JSON.parse(readFileSync2(path, "utf-8"));
2065
+ if (existsSync5(path)) {
2066
+ return JSON.parse(readFileSync5(path, "utf-8"));
256
2067
  }
257
2068
  } catch {
258
2069
  }
@@ -260,7 +2071,7 @@ function loadWatermarks() {
260
2071
  }
261
2072
  function saveWatermarks(watermarks) {
262
2073
  const path = stateWritePath(WORKSPACE_WATERMARK_FILE);
263
- writeFileSync2(path, JSON.stringify(watermarks, null, 2), "utf-8");
2074
+ writeFileSync3(path, JSON.stringify(watermarks, null, 2), "utf-8");
264
2075
  }
265
2076
  var VALID_CATEGORIES = /* @__PURE__ */ new Set([
266
2077
  "fact",
@@ -291,16 +2102,16 @@ function parseFrontmatterType(content) {
291
2102
  return null;
292
2103
  }
293
2104
  function collectWorkspaceMemoryFiles() {
294
- const HOME2 = process.env.HOME || "";
295
- const workspaceDir = join2(HOME2, ".openclaw", "workspace");
2105
+ const HOME3 = process.env.HOME || "";
2106
+ const workspaceDir = join5(HOME3, ".openclaw", "workspace");
296
2107
  const files = [];
297
- const memoryMd = join2(workspaceDir, "MEMORY.md");
298
- if (existsSync2(memoryMd)) files.push(memoryMd);
299
- const memoryDir = join2(workspaceDir, "memory");
300
- if (existsSync2(memoryDir)) {
301
- for (const entry of readdirSync(memoryDir)) {
2108
+ const memoryMd = join5(workspaceDir, "MEMORY.md");
2109
+ if (existsSync5(memoryMd)) files.push(memoryMd);
2110
+ const memoryDir = join5(workspaceDir, "memory");
2111
+ if (existsSync5(memoryDir)) {
2112
+ for (const entry of readdirSync3(memoryDir)) {
302
2113
  if (entry.endsWith(".md")) {
303
- files.push(join2(memoryDir, entry));
2114
+ files.push(join5(memoryDir, entry));
304
2115
  }
305
2116
  }
306
2117
  }
@@ -312,17 +2123,17 @@ async function syncWorkspaceMemory(crystal, agentId, logger) {
312
2123
  let ingested = 0;
313
2124
  for (const filePath of files) {
314
2125
  try {
315
- const stat = statSync(filePath);
2126
+ const stat = statSync2(filePath);
316
2127
  const lastMtime = watermarks[filePath] || 0;
317
2128
  if (stat.mtimeMs <= lastMtime) continue;
318
- const content = readFileSync2(filePath, "utf-8");
2129
+ const content = readFileSync5(filePath, "utf-8");
319
2130
  if (!content || content.trim().length < 50) continue;
320
2131
  const category = parseFrontmatterType(content) || "fact";
321
2132
  await crystal.remember(content, category);
322
2133
  ingested++;
323
2134
  watermarks[filePath] = stat.mtimeMs;
324
2135
  } catch (err) {
325
- logger.warn(`memory-crystal: workspace sync skipped ${basename2(filePath)}: ${err.message}`);
2136
+ logger.warn(`memory-crystal: workspace sync skipped ${basename3(filePath)}: ${err.message}`);
326
2137
  }
327
2138
  }
328
2139
  if (ingested > 0) {