@wipcomputer/memory-crystal 0.7.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (157)
  1. package/.env.example +20 -0
  2. package/CHANGELOG.md +367 -0
  3. package/LICENSE +21 -0
  4. package/README-ENTERPRISE.md +226 -0
  5. package/README.md +127 -0
  6. package/RELAY.md +199 -0
  7. package/TECHNICAL.md +628 -0
  8. package/_trash/RELEASE-NOTES-v0-7-4.md +64 -0
  9. package/_trash/RELEASE-NOTES-v0-7-5.md +19 -0
  10. package/cloud/README.md +116 -0
  11. package/cloud/docs/gpt-system-instructions.md +69 -0
  12. package/cloud/migrations/0001_init.sql +52 -0
  13. package/dist/bridge.d.ts +7 -0
  14. package/dist/bridge.js +14 -0
  15. package/dist/bulk-copy.d.ts +17 -0
  16. package/dist/bulk-copy.js +90 -0
  17. package/dist/cc-hook.d.ts +8 -0
  18. package/dist/cc-hook.js +368 -0
  19. package/dist/cc-poller.d.ts +1 -0
  20. package/dist/cc-poller.js +550 -0
  21. package/dist/chunk-25LXQJ4Z.js +110 -0
  22. package/dist/chunk-2DRXIRQW.js +97 -0
  23. package/dist/chunk-2ZNH5F6E.js +1281 -0
  24. package/dist/chunk-3G3SFYYI.js +288 -0
  25. package/dist/chunk-3RG5ZIWI.js +10 -0
  26. package/dist/chunk-3S6TI23B.js +97 -0
  27. package/dist/chunk-3VFIJYS4.js +818 -0
  28. package/dist/chunk-52QE3YI3.js +1169 -0
  29. package/dist/chunk-57RP3DIN.js +1205 -0
  30. package/dist/chunk-5HSZ4W2P.js +62 -0
  31. package/dist/chunk-645IPXW3.js +290 -0
  32. package/dist/chunk-7A7ELD4C.js +1205 -0
  33. package/dist/chunk-7FYY4GZM.js +1205 -0
  34. package/dist/chunk-7IUE7ODU.js +254 -0
  35. package/dist/chunk-7RMLKZIS.js +108 -0
  36. package/dist/chunk-AA3OPP4Z.js +432 -0
  37. package/dist/chunk-ASSZDR6I.js +108 -0
  38. package/dist/chunk-AYRJVWUC.js +1205 -0
  39. package/dist/chunk-CCYI5O3D.js +148 -0
  40. package/dist/chunk-D3I3ZSE2.js +411 -0
  41. package/dist/chunk-DACSKLY6.js +219 -0
  42. package/dist/chunk-DW5B4BL7.js +108 -0
  43. package/dist/chunk-EKSACBTJ.js +1070 -0
  44. package/dist/chunk-EXEZZADG.js +248 -0
  45. package/dist/chunk-F3Y7EL7K.js +83 -0
  46. package/dist/chunk-FHRZNOMW.js +1205 -0
  47. package/dist/chunk-IM7N24MT.js +129 -0
  48. package/dist/chunk-IPNYIXFK.js +1178 -0
  49. package/dist/chunk-J7MRSZIO.js +167 -0
  50. package/dist/chunk-JITKI2OI.js +106 -0
  51. package/dist/chunk-JWZXYVET.js +1068 -0
  52. package/dist/chunk-KCQUXVYT.js +108 -0
  53. package/dist/chunk-KOQ43OX6.js +1281 -0
  54. package/dist/chunk-KYVWO6ZM.js +1069 -0
  55. package/dist/chunk-L3VHARQH.js +413 -0
  56. package/dist/chunk-LBWDS6BE.js +288 -0
  57. package/dist/chunk-LOVAHSQV.js +411 -0
  58. package/dist/chunk-LQOYCAGG.js +446 -0
  59. package/dist/chunk-LWAIPJ2W.js +146 -0
  60. package/dist/chunk-M5DHKW7M.js +127 -0
  61. package/dist/chunk-MBKCIJHM.js +1328 -0
  62. package/dist/chunk-MK42FMEG.js +147 -0
  63. package/dist/chunk-MOBMYHKL.js +1205 -0
  64. package/dist/chunk-MPLTNMRG.js +67 -0
  65. package/dist/chunk-NIJCVN3O.js +147 -0
  66. package/dist/chunk-NZCFSZQ7.js +1205 -0
  67. package/dist/chunk-O2UITJGH.js +465 -0
  68. package/dist/chunk-OCRA44AZ.js +108 -0
  69. package/dist/chunk-P3KJR66H.js +117 -0
  70. package/dist/chunk-PEK6JH65.js +432 -0
  71. package/dist/chunk-PJ6FFKEX.js +77 -0
  72. package/dist/chunk-PLUBBZYR.js +800 -0
  73. package/dist/chunk-PNKVD2UK.js +26 -0
  74. package/dist/chunk-PSQZURHO.js +229 -0
  75. package/dist/chunk-SGL6ISBJ.js +1061 -0
  76. package/dist/chunk-SJABZZT5.js +97 -0
  77. package/dist/chunk-TD3P3K32.js +1199 -0
  78. package/dist/chunk-TMDZJJKV.js +288 -0
  79. package/dist/chunk-UNHVZB5G.js +411 -0
  80. package/dist/chunk-VAFTWSTE.js +1061 -0
  81. package/dist/chunk-VNFXFQBB.js +217 -0
  82. package/dist/chunk-X3GVFKSJ.js +1205 -0
  83. package/dist/chunk-XZ3S56RQ.js +1061 -0
  84. package/dist/chunk-Y72C7F6O.js +148 -0
  85. package/dist/chunk-YLICP577.js +1205 -0
  86. package/dist/chunk-YX6AXLVK.js +159 -0
  87. package/dist/chunk-ZCQYHTNU.js +146 -0
  88. package/dist/cli.d.ts +1 -0
  89. package/dist/cli.js +1105 -0
  90. package/dist/cloud-crystal.js +6 -0
  91. package/dist/core.d.ts +232 -0
  92. package/dist/core.js +12 -0
  93. package/dist/crypto.d.ts +20 -0
  94. package/dist/crypto.js +27 -0
  95. package/dist/crystal-capture.sh +29 -0
  96. package/dist/crystal-serve.d.ts +4 -0
  97. package/dist/crystal-serve.js +252 -0
  98. package/dist/dev-update-SZ2Z4WCQ.js +6 -0
  99. package/dist/discover.d.ts +30 -0
  100. package/dist/discover.js +177 -0
  101. package/dist/doctor.d.ts +9 -0
  102. package/dist/doctor.js +334 -0
  103. package/dist/dream-weaver.d.ts +8 -0
  104. package/dist/dream-weaver.js +56 -0
  105. package/dist/file-sync.d.ts +48 -0
  106. package/dist/file-sync.js +18 -0
  107. package/dist/installer.d.ts +61 -0
  108. package/dist/installer.js +618 -0
  109. package/dist/ldm-backup.sh +116 -0
  110. package/dist/ldm.d.ts +50 -0
  111. package/dist/ldm.js +32 -0
  112. package/dist/mcp-server.d.ts +1 -0
  113. package/dist/mcp-server.js +265 -0
  114. package/dist/migrate.d.ts +1 -0
  115. package/dist/migrate.js +89 -0
  116. package/dist/mirror-sync.d.ts +1 -0
  117. package/dist/mirror-sync.js +159 -0
  118. package/dist/oc-backfill.d.ts +19 -0
  119. package/dist/oc-backfill.js +74 -0
  120. package/dist/openclaw.d.ts +5 -0
  121. package/dist/openclaw.js +423 -0
  122. package/dist/pair.d.ts +4 -0
  123. package/dist/pair.js +75 -0
  124. package/dist/poller.d.ts +1 -0
  125. package/dist/poller.js +634 -0
  126. package/dist/role.d.ts +24 -0
  127. package/dist/role.js +13 -0
  128. package/dist/search-pipeline-4K4OJSSS.js +255 -0
  129. package/dist/search-pipeline-4PRS6LI7.js +280 -0
  130. package/dist/search-pipeline-7UJMXPLO.js +280 -0
  131. package/dist/search-pipeline-DQTRLGBH.js +74 -0
  132. package/dist/search-pipeline-HNG37REH.js +282 -0
  133. package/dist/search-pipeline-IZFPLBUB.js +280 -0
  134. package/dist/search-pipeline-MID6F26Q.js +73 -0
  135. package/dist/search-pipeline-N52JZFNN.js +282 -0
  136. package/dist/search-pipeline-OPB2PRQQ.js +280 -0
  137. package/dist/search-pipeline-VXTE5HAD.js +262 -0
  138. package/dist/staging.d.ts +29 -0
  139. package/dist/staging.js +21 -0
  140. package/dist/summarize.d.ts +19 -0
  141. package/dist/summarize.js +10 -0
  142. package/dist/worker-demo.js +186 -0
  143. package/dist/worker-mcp.js +404 -0
  144. package/dist/worker.js +137 -0
  145. package/migrations/0001_init.sql +51 -0
  146. package/migrations/0002_cloud_storage.sql +49 -0
  147. package/openclaw.plugin.json +11 -0
  148. package/package.json +57 -0
  149. package/scripts/crystal-capture 2.sh +29 -0
  150. package/scripts/crystal-capture.sh +29 -0
  151. package/scripts/deploy-cloud 2.sh +153 -0
  152. package/scripts/deploy-cloud.sh +153 -0
  153. package/scripts/ldm-backup.sh +116 -0
  154. package/scripts/migrate-lance-to-sqlite.mjs +217 -0
  155. package/skills/memory/SKILL.md +427 -0
  156. package/wrangler-demo.toml +8 -0
  157. package/wrangler-mcp.toml +24 -0
@@ -0,0 +1,280 @@
1
+ // src/llm.ts
2
import { execFileSync, execSync } from "child_process";
import { existsSync, readFileSync } from "fs";
import { homedir } from "os";
import { join } from "path";
6
+ var expansionCache = /* @__PURE__ */ new Map();
7
+ var detectedProvider = null;
8
+ var detectionDone = false;
9
+ function getOpSecret(itemName, fieldLabel) {
10
+ try {
11
+ const saTokenPath = join(homedir(), ".openclaw/secrets/op-sa-token");
12
+ if (!existsSync(saTokenPath)) return void 0;
13
+ const saToken = readFileSync(saTokenPath, "utf-8").trim();
14
+ const result = execSync(
15
+ `OP_SERVICE_ACCOUNT_TOKEN="${saToken}" op item get "${itemName}" --vault "Agent Secrets" --fields "${fieldLabel}" --reveal`,
16
+ { encoding: "utf-8", timeout: 5e3, stdio: ["pipe", "pipe", "pipe"] }
17
+ ).trim();
18
+ return result || void 0;
19
+ } catch {
20
+ return void 0;
21
+ }
22
+ }
23
+ async function detectProvider() {
24
+ if (detectionDone && detectedProvider) return detectedProvider;
25
+ detectionDone = true;
26
+ try {
27
+ const resp = await fetch("http://localhost:8080/v1/models", { signal: AbortSignal.timeout(1e3) });
28
+ if (resp.ok) {
29
+ const data = await resp.json();
30
+ const model = data?.data?.[0]?.id || "default";
31
+ detectedProvider = { provider: "mlx", baseURL: "http://localhost:8080/v1", apiKey: "not-needed", model };
32
+ process.stderr.write(`[memory-crystal] LLM provider: MLX (${model})
33
+ `);
34
+ return detectedProvider;
35
+ }
36
+ } catch {
37
+ }
38
+ try {
39
+ const resp = await fetch("http://localhost:11434/api/tags", { signal: AbortSignal.timeout(1e3) });
40
+ if (resp.ok) {
41
+ const data = await resp.json();
42
+ const models = data?.models || [];
43
+ const embeddingOnly = ["nomic-embed-text", "mxbai-embed", "all-minilm", "snowflake-arctic-embed"];
44
+ const chatModel = models.find((m) => !embeddingOnly.some((e) => m.name.startsWith(e)));
45
+ if (chatModel) {
46
+ detectedProvider = { provider: "ollama", baseURL: "http://localhost:11434/v1", apiKey: "ollama", model: chatModel.name };
47
+ process.stderr.write(`[memory-crystal] LLM provider: Ollama (${chatModel.name})
48
+ `);
49
+ return detectedProvider;
50
+ }
51
+ }
52
+ } catch {
53
+ }
54
+ const anthropicKey = process.env.ANTHROPIC_API_KEY || getOpSecret("Anthropic Auth Token - remote bunkers", "api key");
55
+ if (anthropicKey) {
56
+ detectedProvider = { provider: "anthropic", baseURL: "https://api.anthropic.com", apiKey: anthropicKey, model: "claude-haiku-4-5-20251001" };
57
+ process.stderr.write("[memory-crystal] LLM provider: Anthropic API\n");
58
+ return detectedProvider;
59
+ }
60
+ const openaiKey = process.env.OPENAI_API_KEY || getOpSecret("OpenAI API", "api key");
61
+ if (openaiKey) {
62
+ detectedProvider = { provider: "openai", baseURL: "https://api.openai.com/v1", apiKey: openaiKey, model: "gpt-4o-mini" };
63
+ process.stderr.write("[memory-crystal] LLM provider: OpenAI API\n");
64
+ return detectedProvider;
65
+ }
66
+ detectedProvider = { provider: "none", baseURL: "", apiKey: "", model: "" };
67
+ process.stderr.write("[memory-crystal] LLM provider: none (deep search unavailable)\n");
68
+ return detectedProvider;
69
+ }
70
+ async function chatComplete(config, messages, maxTokens = 300) {
71
+ if (config.provider === "anthropic") {
72
+ return anthropicComplete(config, messages, maxTokens);
73
+ }
74
+ const resp = await fetch(`${config.baseURL}/chat/completions`, {
75
+ method: "POST",
76
+ headers: {
77
+ "Content-Type": "application/json",
78
+ "Authorization": `Bearer ${config.apiKey}`
79
+ },
80
+ body: JSON.stringify({
81
+ model: config.model,
82
+ messages,
83
+ max_tokens: maxTokens,
84
+ temperature: 0.7
85
+ })
86
+ });
87
+ if (!resp.ok) throw new Error(`LLM request failed: ${resp.status}`);
88
+ const data = await resp.json();
89
+ return data.choices?.[0]?.message?.content || "";
90
+ }
91
+ async function anthropicComplete(config, messages, maxTokens) {
92
+ const systemMsg = messages.find((m) => m.role === "system");
93
+ const userMessages = messages.filter((m) => m.role !== "system");
94
+ const body = {
95
+ model: config.model,
96
+ max_tokens: maxTokens,
97
+ messages: userMessages
98
+ };
99
+ if (systemMsg) body.system = systemMsg.content;
100
+ const resp = await fetch("https://api.anthropic.com/v1/messages", {
101
+ method: "POST",
102
+ headers: {
103
+ "Content-Type": "application/json",
104
+ "x-api-key": config.apiKey,
105
+ "anthropic-version": "2023-06-01"
106
+ },
107
+ body: JSON.stringify(body)
108
+ });
109
+ if (!resp.ok) throw new Error(`Anthropic request failed: ${resp.status}`);
110
+ const data = await resp.json();
111
+ return data.content?.[0]?.text || "";
112
+ }
113
+ var EXPAND_PROMPT = `You are a search query expander. Given a search query, generate exactly 3 variations to improve search recall.
114
+
115
+ Output exactly 3 lines in this format (no other text):
116
+ lex: <keyword-focused variation for full-text search>
117
+ vec: <semantic variation rephrased for embedding similarity>
118
+ hyde: <hypothetical document snippet that would answer this query>
119
+
120
+ Rules:
121
+ - Each variation must contain at least one term from the original query
122
+ - Keep variations concise (under 30 words each)
123
+ - lex should use specific keywords and synonyms
124
+ - vec should rephrase the intent naturally
125
+ - hyde should be a short passage as if answering the query`;
126
+ async function expandQuery(query) {
127
+ const cached = expansionCache.get(query);
128
+ if (cached) return cached;
129
+ const config = await detectProvider();
130
+ if (config.provider === "none") return [];
131
+ try {
132
+ const result = await chatComplete(config, [
133
+ { role: "system", content: EXPAND_PROMPT },
134
+ { role: "user", content: query }
135
+ ], 300);
136
+ const lines = result.trim().split("\n");
137
+ const queryLower = query.toLowerCase();
138
+ const queryTerms = queryLower.replace(/[^a-z0-9\s]/g, " ").split(/\s+/).filter(Boolean);
139
+ const hasQueryTerm = (text) => {
140
+ const lower = text.toLowerCase();
141
+ if (queryTerms.length === 0) return true;
142
+ return queryTerms.some((term) => lower.includes(term));
143
+ };
144
+ const variations = lines.map((line) => {
145
+ const colonIdx = line.indexOf(":");
146
+ if (colonIdx === -1) return null;
147
+ const type = line.slice(0, colonIdx).trim();
148
+ if (type !== "lex" && type !== "vec" && type !== "hyde") return null;
149
+ const text = line.slice(colonIdx + 1).trim();
150
+ if (!text || !hasQueryTerm(text)) return null;
151
+ return { type, text };
152
+ }).filter((v) => v !== null);
153
+ if (variations.length > 0) {
154
+ expansionCache.set(query, variations);
155
+ return variations;
156
+ }
157
+ } catch (err) {
158
+ process.stderr.write(`[memory-crystal] Query expansion failed: ${err.message}
159
+ `);
160
+ }
161
+ const fallback = [
162
+ { type: "lex", text: query },
163
+ { type: "vec", text: query },
164
+ { type: "hyde", text: `Information about ${query}` }
165
+ ];
166
+ return fallback;
167
+ }
168
+ var RERANK_PROMPT = `You are a search result re-ranker. Given a query and a list of text passages, rate each passage's relevance to the query.
169
+
170
+ Output one line per passage in this exact format:
171
+ <index>: <score>
172
+
173
+ Where index is the passage number (0-based) and score is a float from 0.0 to 1.0.
174
+ - 1.0 = perfectly relevant, directly answers the query
175
+ - 0.7 = highly relevant, closely related
176
+ - 0.4 = somewhat relevant, tangentially related
177
+ - 0.1 = barely relevant
178
+ - 0.0 = not relevant at all
179
+
180
+ Rate ALL passages. Output nothing else.`;
181
+ async function rerankResults(query, passages) {
182
+ const config = await detectProvider();
183
+ if (config.provider === "none") {
184
+ return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
185
+ }
186
+ try {
187
+ const passageList = passages.map((p, i) => `[${i}] ${p.slice(0, 500)}`).join("\n\n");
188
+ const result = await chatComplete(config, [
189
+ { role: "system", content: RERANK_PROMPT },
190
+ { role: "user", content: `Query: ${query}
191
+
192
+ Passages:
193
+ ${passageList}` }
194
+ ], 200);
195
+ const results = [];
196
+ for (const line of result.trim().split("\n")) {
197
+ const match = line.match(/^(\d+):\s*([\d.]+)/);
198
+ if (match) {
199
+ results.push({ index: parseInt(match[1]), score: parseFloat(match[2]) });
200
+ }
201
+ }
202
+ const scored = new Set(results.map((r) => r.index));
203
+ for (let i = 0; i < passages.length; i++) {
204
+ if (!scored.has(i)) results.push({ index: i, score: 0 });
205
+ }
206
+ return results.sort((a, b) => b.score - a.score);
207
+ } catch (err) {
208
+ process.stderr.write(`[memory-crystal] Reranking failed: ${err.message}
209
+ `);
210
+ return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
211
+ }
212
+ }
213
+
214
+ // src/search-pipeline.ts
215
+ var STRONG_SIGNAL_MIN_SCORE = 0.85;
216
+ var STRONG_SIGNAL_MIN_GAP = 0.15;
217
+ var RERANK_CANDIDATE_LIMIT = 40;
218
+ async function deepSearch(crystal, query, options = {}) {
219
+ const limit = options.limit || 5;
220
+ const filter = options.filter;
221
+ const provider = await detectProvider();
222
+ if (provider.provider === "none") {
223
+ return crystal.search(query, limit, filter);
224
+ }
225
+ const db = crystal.sqliteDb;
226
+ if (!db) return crystal.search(query, limit, filter);
227
+ const sinceDate = filter?.since ? crystal.parseSince(filter.since) : void 0;
228
+ const internalFilter = { ...filter, sinceDate };
229
+ const initialFts = crystal.searchFTS(query, 20, internalFilter);
230
+ const topScore = initialFts[0]?.score ?? 0;
231
+ const secondScore = initialFts[1]?.score ?? 0;
232
+ const hasStrongSignal = initialFts.length > 0 && topScore >= STRONG_SIGNAL_MIN_SCORE && topScore - secondScore >= STRONG_SIGNAL_MIN_GAP;
233
+ const expanded = hasStrongSignal ? [] : await expandQuery(query);
234
+ const allResultLists = [];
235
+ if (initialFts.length > 0) allResultLists.push(initialFts);
236
+ const [queryEmbedding] = await crystal.embed([query]);
237
+ const originalVec = crystal.searchVec(queryEmbedding, 30, internalFilter);
238
+ if (originalVec.length > 0) allResultLists.push(originalVec);
239
+ for (const variation of expanded) {
240
+ if (variation.type === "lex") {
241
+ const ftsResults = crystal.searchFTS(variation.text, 20, internalFilter);
242
+ if (ftsResults.length > 0) allResultLists.push(ftsResults);
243
+ } else {
244
+ const [embedding] = await crystal.embed([variation.text]);
245
+ const vecResults = crystal.searchVec(embedding, 20, internalFilter);
246
+ if (vecResults.length > 0) allResultLists.push(vecResults);
247
+ }
248
+ }
249
+ const weights = allResultLists.map((_, i) => i < 2 ? 2 : 1);
250
+ const fused = crystal.reciprocalRankFusion(allResultLists, weights);
251
+ const candidates = fused.slice(0, RERANK_CANDIDATE_LIMIT);
252
+ if (candidates.length === 0) return [];
253
+ const passages = candidates.map((c) => c.text.slice(0, 500));
254
+ const reranked = await rerankResults(query, passages);
255
+ const now = Date.now();
256
+ const blended = reranked.map((r) => {
257
+ const candidate = candidates[r.index];
258
+ if (!candidate) return null;
259
+ const rrfRank = r.index + 1;
260
+ let rrfWeight;
261
+ if (rrfRank <= 3) rrfWeight = 0.75;
262
+ else if (rrfRank <= 10) rrfWeight = 0.6;
263
+ else rrfWeight = 0.4;
264
+ const rrfScore = 1 / rrfRank;
265
+ const blendedScore = rrfWeight * rrfScore + (1 - rrfWeight) * r.score;
266
+ const ageDays = candidate.created_at ? (now - new Date(candidate.created_at).getTime()) / 864e5 : 0;
267
+ const recency = candidate.created_at ? crystal.recencyWeight(ageDays) : 1;
268
+ const finalScore = Math.min(blendedScore * recency * 8, 1);
269
+ const freshness = candidate.created_at ? crystal.freshnessLabel(ageDays) : void 0;
270
+ return {
271
+ ...candidate,
272
+ score: finalScore,
273
+ freshness
274
+ };
275
+ }).filter((r) => r !== null);
276
+ return blended.sort((a, b) => b.score - a.score).slice(0, limit);
277
+ }
278
+ export {
279
+ deepSearch
280
+ };
@@ -0,0 +1,262 @@
1
+ // src/llm.ts
2
+ var expansionCache = /* @__PURE__ */ new Map();
3
+ var detectedProvider = null;
4
+ var detectionDone = false;
5
+ async function detectProvider() {
6
+ if (detectionDone && detectedProvider) return detectedProvider;
7
+ detectionDone = true;
8
+ try {
9
+ const resp = await fetch("http://localhost:8080/v1/models", { signal: AbortSignal.timeout(1e3) });
10
+ if (resp.ok) {
11
+ const data = await resp.json();
12
+ const model = data?.data?.[0]?.id || "default";
13
+ detectedProvider = { provider: "mlx", baseURL: "http://localhost:8080/v1", apiKey: "not-needed", model };
14
+ process.stderr.write(`[memory-crystal] LLM provider: MLX (${model})
15
+ `);
16
+ return detectedProvider;
17
+ }
18
+ } catch {
19
+ }
20
+ try {
21
+ const resp = await fetch("http://localhost:11434/api/tags", { signal: AbortSignal.timeout(1e3) });
22
+ if (resp.ok) {
23
+ const data = await resp.json();
24
+ const models = data?.models || [];
25
+ const embeddingOnly = ["nomic-embed-text", "mxbai-embed", "all-minilm", "snowflake-arctic-embed"];
26
+ const chatModel = models.find((m) => !embeddingOnly.some((e) => m.name.startsWith(e)));
27
+ if (chatModel) {
28
+ detectedProvider = { provider: "ollama", baseURL: "http://localhost:11434/v1", apiKey: "ollama", model: chatModel.name };
29
+ process.stderr.write(`[memory-crystal] LLM provider: Ollama (${chatModel.name})
30
+ `);
31
+ return detectedProvider;
32
+ }
33
+ }
34
+ } catch {
35
+ }
36
+ const anthropicKey = process.env.ANTHROPIC_API_KEY;
37
+ if (anthropicKey) {
38
+ detectedProvider = { provider: "anthropic", baseURL: "https://api.anthropic.com", apiKey: anthropicKey, model: "claude-haiku-4-5-20251001" };
39
+ process.stderr.write("[memory-crystal] LLM provider: Anthropic API\n");
40
+ return detectedProvider;
41
+ }
42
+ const openaiKey = process.env.OPENAI_API_KEY;
43
+ if (openaiKey) {
44
+ detectedProvider = { provider: "openai", baseURL: "https://api.openai.com/v1", apiKey: openaiKey, model: "gpt-4o-mini" };
45
+ process.stderr.write("[memory-crystal] LLM provider: OpenAI API\n");
46
+ return detectedProvider;
47
+ }
48
+ detectedProvider = { provider: "none", baseURL: "", apiKey: "", model: "" };
49
+ process.stderr.write("[memory-crystal] LLM provider: none (deep search unavailable)\n");
50
+ return detectedProvider;
51
+ }
52
+ async function chatComplete(config, messages, maxTokens = 300) {
53
+ if (config.provider === "anthropic") {
54
+ return anthropicComplete(config, messages, maxTokens);
55
+ }
56
+ const resp = await fetch(`${config.baseURL}/chat/completions`, {
57
+ method: "POST",
58
+ headers: {
59
+ "Content-Type": "application/json",
60
+ "Authorization": `Bearer ${config.apiKey}`
61
+ },
62
+ body: JSON.stringify({
63
+ model: config.model,
64
+ messages,
65
+ max_tokens: maxTokens,
66
+ temperature: 0.7
67
+ })
68
+ });
69
+ if (!resp.ok) throw new Error(`LLM request failed: ${resp.status}`);
70
+ const data = await resp.json();
71
+ return data.choices?.[0]?.message?.content || "";
72
+ }
73
+ async function anthropicComplete(config, messages, maxTokens) {
74
+ const systemMsg = messages.find((m) => m.role === "system");
75
+ const userMessages = messages.filter((m) => m.role !== "system");
76
+ const body = {
77
+ model: config.model,
78
+ max_tokens: maxTokens,
79
+ messages: userMessages
80
+ };
81
+ if (systemMsg) body.system = systemMsg.content;
82
+ const resp = await fetch("https://api.anthropic.com/v1/messages", {
83
+ method: "POST",
84
+ headers: {
85
+ "Content-Type": "application/json",
86
+ "x-api-key": config.apiKey,
87
+ "anthropic-version": "2023-06-01"
88
+ },
89
+ body: JSON.stringify(body)
90
+ });
91
+ if (!resp.ok) throw new Error(`Anthropic request failed: ${resp.status}`);
92
+ const data = await resp.json();
93
+ return data.content?.[0]?.text || "";
94
+ }
95
+ var EXPAND_PROMPT = `You are a search query expander. Given a search query, generate exactly 3 variations to improve search recall.
96
+
97
+ Output exactly 3 lines in this format (no other text):
98
+ lex: <keyword-focused variation for full-text search>
99
+ vec: <semantic variation rephrased for embedding similarity>
100
+ hyde: <hypothetical document snippet that would answer this query>
101
+
102
+ Rules:
103
+ - Each variation must contain at least one term from the original query
104
+ - Keep variations concise (under 30 words each)
105
+ - lex should use specific keywords and synonyms
106
+ - vec should rephrase the intent naturally
107
+ - hyde should be a short passage as if answering the query`;
108
+ async function expandQuery(query) {
109
+ const cached = expansionCache.get(query);
110
+ if (cached) return cached;
111
+ const config = await detectProvider();
112
+ if (config.provider === "none") return [];
113
+ try {
114
+ const result = await chatComplete(config, [
115
+ { role: "system", content: EXPAND_PROMPT },
116
+ { role: "user", content: query }
117
+ ], 300);
118
+ const lines = result.trim().split("\n");
119
+ const queryLower = query.toLowerCase();
120
+ const queryTerms = queryLower.replace(/[^a-z0-9\s]/g, " ").split(/\s+/).filter(Boolean);
121
+ const hasQueryTerm = (text) => {
122
+ const lower = text.toLowerCase();
123
+ if (queryTerms.length === 0) return true;
124
+ return queryTerms.some((term) => lower.includes(term));
125
+ };
126
+ const variations = lines.map((line) => {
127
+ const colonIdx = line.indexOf(":");
128
+ if (colonIdx === -1) return null;
129
+ const type = line.slice(0, colonIdx).trim();
130
+ if (type !== "lex" && type !== "vec" && type !== "hyde") return null;
131
+ const text = line.slice(colonIdx + 1).trim();
132
+ if (!text || !hasQueryTerm(text)) return null;
133
+ return { type, text };
134
+ }).filter((v) => v !== null);
135
+ if (variations.length > 0) {
136
+ expansionCache.set(query, variations);
137
+ return variations;
138
+ }
139
+ } catch (err) {
140
+ process.stderr.write(`[memory-crystal] Query expansion failed: ${err.message}
141
+ `);
142
+ }
143
+ const fallback = [
144
+ { type: "lex", text: query },
145
+ { type: "vec", text: query },
146
+ { type: "hyde", text: `Information about ${query}` }
147
+ ];
148
+ return fallback;
149
+ }
150
+ var RERANK_PROMPT = `You are a search result re-ranker. Given a query and a list of text passages, rate each passage's relevance to the query.
151
+
152
+ Output one line per passage in this exact format:
153
+ <index>: <score>
154
+
155
+ Where index is the passage number (0-based) and score is a float from 0.0 to 1.0.
156
+ - 1.0 = perfectly relevant, directly answers the query
157
+ - 0.7 = highly relevant, closely related
158
+ - 0.4 = somewhat relevant, tangentially related
159
+ - 0.1 = barely relevant
160
+ - 0.0 = not relevant at all
161
+
162
+ Rate ALL passages. Output nothing else.`;
163
+ async function rerankResults(query, passages) {
164
+ const config = await detectProvider();
165
+ if (config.provider === "none") {
166
+ return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
167
+ }
168
+ try {
169
+ const passageList = passages.map((p, i) => `[${i}] ${p.slice(0, 500)}`).join("\n\n");
170
+ const result = await chatComplete(config, [
171
+ { role: "system", content: RERANK_PROMPT },
172
+ { role: "user", content: `Query: ${query}
173
+
174
+ Passages:
175
+ ${passageList}` }
176
+ ], 200);
177
+ const results = [];
178
+ for (const line of result.trim().split("\n")) {
179
+ const match = line.match(/^(\d+):\s*([\d.]+)/);
180
+ if (match) {
181
+ results.push({ index: parseInt(match[1]), score: parseFloat(match[2]) });
182
+ }
183
+ }
184
+ const scored = new Set(results.map((r) => r.index));
185
+ for (let i = 0; i < passages.length; i++) {
186
+ if (!scored.has(i)) results.push({ index: i, score: 0 });
187
+ }
188
+ return results.sort((a, b) => b.score - a.score);
189
+ } catch (err) {
190
+ process.stderr.write(`[memory-crystal] Reranking failed: ${err.message}
191
+ `);
192
+ return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
193
+ }
194
+ }
195
+
196
+ // src/search-pipeline.ts
197
+ var STRONG_SIGNAL_MIN_SCORE = 0.85;
198
+ var STRONG_SIGNAL_MIN_GAP = 0.15;
199
+ var RERANK_CANDIDATE_LIMIT = 40;
200
+ async function deepSearch(crystal, query, options = {}) {
201
+ const limit = options.limit || 5;
202
+ const filter = options.filter;
203
+ const provider = await detectProvider();
204
+ if (provider.provider === "none") {
205
+ return crystal.search(query, limit, filter);
206
+ }
207
+ const db = crystal.sqliteDb;
208
+ if (!db) return crystal.search(query, limit, filter);
209
+ const sinceDate = filter?.since ? crystal.parseSince(filter.since) : void 0;
210
+ const internalFilter = { ...filter, sinceDate };
211
+ const initialFts = crystal.searchFTS(query, 20, internalFilter);
212
+ const topScore = initialFts[0]?.score ?? 0;
213
+ const secondScore = initialFts[1]?.score ?? 0;
214
+ const hasStrongSignal = initialFts.length > 0 && topScore >= STRONG_SIGNAL_MIN_SCORE && topScore - secondScore >= STRONG_SIGNAL_MIN_GAP;
215
+ const expanded = hasStrongSignal ? [] : await expandQuery(query);
216
+ const allResultLists = [];
217
+ if (initialFts.length > 0) allResultLists.push(initialFts);
218
+ const [queryEmbedding] = await crystal.embed([query]);
219
+ const originalVec = crystal.searchVec(queryEmbedding, 30, internalFilter);
220
+ if (originalVec.length > 0) allResultLists.push(originalVec);
221
+ for (const variation of expanded) {
222
+ if (variation.type === "lex") {
223
+ const ftsResults = crystal.searchFTS(variation.text, 20, internalFilter);
224
+ if (ftsResults.length > 0) allResultLists.push(ftsResults);
225
+ } else {
226
+ const [embedding] = await crystal.embed([variation.text]);
227
+ const vecResults = crystal.searchVec(embedding, 20, internalFilter);
228
+ if (vecResults.length > 0) allResultLists.push(vecResults);
229
+ }
230
+ }
231
+ const weights = allResultLists.map((_, i) => i < 2 ? 2 : 1);
232
+ const fused = crystal.reciprocalRankFusion(allResultLists, weights);
233
+ const candidates = fused.slice(0, RERANK_CANDIDATE_LIMIT);
234
+ if (candidates.length === 0) return [];
235
+ const passages = candidates.map((c) => c.text.slice(0, 500));
236
+ const reranked = await rerankResults(query, passages);
237
+ const now = Date.now();
238
+ const blended = reranked.map((r) => {
239
+ const candidate = candidates[r.index];
240
+ if (!candidate) return null;
241
+ const rrfRank = r.index + 1;
242
+ let rrfWeight;
243
+ if (rrfRank <= 3) rrfWeight = 0.75;
244
+ else if (rrfRank <= 10) rrfWeight = 0.6;
245
+ else rrfWeight = 0.4;
246
+ const rrfScore = 1 / rrfRank;
247
+ const blendedScore = rrfWeight * rrfScore + (1 - rrfWeight) * r.score;
248
+ const ageDays = candidate.created_at ? (now - new Date(candidate.created_at).getTime()) / 864e5 : 0;
249
+ const recency = candidate.created_at ? crystal.recencyWeight(ageDays) : 1;
250
+ const finalScore = Math.min(blendedScore * recency * 8, 1);
251
+ const freshness = candidate.created_at ? crystal.freshnessLabel(ageDays) : void 0;
252
+ return {
253
+ ...candidate,
254
+ score: finalScore,
255
+ freshness
256
+ };
257
+ }).filter((r) => r !== null);
258
+ return blended.sort((a, b) => b.score - a.score).slice(0, limit);
259
+ }
260
+ export {
261
+ deepSearch
262
+ };
@@ -0,0 +1,29 @@
1
+ interface StagingPaths {
2
+ root: string;
3
+ transcripts: string;
4
+ readyFile: string;
5
+ }
6
+ declare function stagingPaths(agentId: string): StagingPaths;
7
+ /** Ensure staging directories exist for an agent. */
8
+ declare function ensureStaging(agentId: string): StagingPaths;
9
+ /** Mark an agent's staging as ready for processing. */
10
+ declare function markReady(agentId: string): void;
11
+ /** Check if an agent ID is new (no existing LDM agent directory). */
12
+ declare function isNewAgent(agentId: string): boolean;
13
+ /** Check if an agent has staged data ready for processing. */
14
+ declare function hasStagedData(agentId: string): boolean;
15
+ /** List all agents with staged data ready for processing. */
16
+ declare function listStagedAgents(): string[];
17
+ interface StagingResult {
18
+ agentId: string;
19
+ transcriptsProcessed: number;
20
+ backfillChunks: number;
21
+ dreamWeaverRan: boolean;
22
+ durationMs: number;
23
+ }
24
+ /** Process a staged agent: backfill + dream-weave + move to live. */
25
+ declare function processStagedAgent(agentId: string): Promise<StagingResult>;
26
+ /** Process all staged agents. */
27
+ declare function processAllStaged(): Promise<StagingResult[]>;
28
+
29
+ export { type StagingPaths, type StagingResult, ensureStaging, hasStagedData, isNewAgent, listStagedAgents, markReady, processAllStaged, processStagedAgent, stagingPaths };
@@ -0,0 +1,21 @@
1
+ import {
2
+ ensureStaging,
3
+ hasStagedData,
4
+ isNewAgent,
5
+ listStagedAgents,
6
+ markReady,
7
+ processAllStaged,
8
+ processStagedAgent,
9
+ stagingPaths
10
+ } from "./chunk-ZCQYHTNU.js";
11
+ import "./chunk-EXEZZADG.js";
12
+ export {
13
+ ensureStaging,
14
+ hasStagedData,
15
+ isNewAgent,
16
+ listStagedAgents,
17
+ markReady,
18
+ processAllStaged,
19
+ processStagedAgent,
20
+ stagingPaths
21
+ };
@@ -0,0 +1,19 @@
1
+ interface SessionSummary {
2
+ title: string;
3
+ slug: string;
4
+ summary: string;
5
+ topics: string[];
6
+ messageCount: number;
7
+ date: string;
8
+ }
9
+ interface SummaryMessage {
10
+ role: string;
11
+ text: string;
12
+ timestamp: string;
13
+ sessionId: string;
14
+ }
15
+ declare function generateSessionSummary(messages: SummaryMessage[]): Promise<SessionSummary>;
16
+ declare function formatSummaryMarkdown(summary: SessionSummary, sessionId: string): string;
17
+ declare function writeSummaryFile(sessionsDir: string, summary: SessionSummary, agentId: string, sessionId: string): string;
18
+
19
+ export { type SessionSummary, type SummaryMessage, formatSummaryMarkdown, generateSessionSummary, writeSummaryFile };
@@ -0,0 +1,10 @@
1
+ import {
2
+ formatSummaryMarkdown,
3
+ generateSessionSummary,
4
+ writeSummaryFile
5
+ } from "./chunk-Y72C7F6O.js";
6
+ export {
7
+ formatSummaryMarkdown,
8
+ generateSessionSummary,
9
+ writeSummaryFile
10
+ };