@wipcomputer/memory-crystal 0.7.34-alpha.2 → 0.7.34-alpha.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. package/LICENSE +1 -1
  2. package/dist/bridge.js +64 -7
  3. package/dist/bulk-copy.js +67 -16
  4. package/dist/cc-hook.js +2163 -62
  5. package/dist/cc-poller.js +1967 -70
  6. package/dist/cli.js +4538 -139
  7. package/dist/core.js +1789 -6
  8. package/dist/crypto.js +153 -14
  9. package/dist/crystal-serve.js +64 -12
  10. package/dist/doctor.js +517 -52
  11. package/dist/dream-weaver.js +1755 -7
  12. package/dist/file-sync.js +407 -9
  13. package/dist/installer.js +840 -145
  14. package/dist/ldm.js +231 -16
  15. package/dist/mcp-server.js +1882 -17
  16. package/dist/migrate.js +1707 -11
  17. package/dist/mirror-sync.js +2052 -34
  18. package/dist/openclaw.js +1895 -84
  19. package/dist/pair.js +112 -16
  20. package/dist/poller.js +2275 -80
  21. package/dist/role.js +159 -7
  22. package/dist/staging.js +235 -10
  23. package/dist/summarize.js +142 -5
  24. package/package.json +7 -4
  25. package/dist/chunk-25LXQJ4Z.js +0 -110
  26. package/dist/chunk-2DRXIRQW.js +0 -97
  27. package/dist/chunk-2GBYLMEF.js +0 -1385
  28. package/dist/chunk-2ZNH5F6E.js +0 -1281
  29. package/dist/chunk-3G3SFYYI.js +0 -288
  30. package/dist/chunk-3RG5ZIWI.js +0 -10
  31. package/dist/chunk-3S6TI23B.js +0 -97
  32. package/dist/chunk-3VFIJYS4.js +0 -818
  33. package/dist/chunk-437F27T6.js +0 -97
  34. package/dist/chunk-52QE3YI3.js +0 -1169
  35. package/dist/chunk-57RP3DIN.js +0 -1205
  36. package/dist/chunk-5HSZ4W2P.js +0 -62
  37. package/dist/chunk-5I7GMRDN.js +0 -146
  38. package/dist/chunk-645IPXW3.js +0 -290
  39. package/dist/chunk-7A7ELD4C.js +0 -1205
  40. package/dist/chunk-7FYY4GZM.js +0 -1205
  41. package/dist/chunk-7IUE7ODU.js +0 -254
  42. package/dist/chunk-7RMLKZIS.js +0 -108
  43. package/dist/chunk-AA3OPP4Z.js +0 -432
  44. package/dist/chunk-AEWLSYPH.js +0 -72
  45. package/dist/chunk-ASSZDR6I.js +0 -108
  46. package/dist/chunk-AYRJVWUC.js +0 -1205
  47. package/dist/chunk-CCYI5O3D.js +0 -148
  48. package/dist/chunk-CGIDSAJB.js +0 -288
  49. package/dist/chunk-D3I3ZSE2.js +0 -411
  50. package/dist/chunk-D3MACYZ4.js +0 -108
  51. package/dist/chunk-DACSKLY6.js +0 -219
  52. package/dist/chunk-DFQ72B7M.js +0 -248
  53. package/dist/chunk-DW5B4BL7.js +0 -108
  54. package/dist/chunk-EKSACBTJ.js +0 -1070
  55. package/dist/chunk-EXEZZADG.js +0 -248
  56. package/dist/chunk-F3Y7EL7K.js +0 -83
  57. package/dist/chunk-FBQWSDPC.js +0 -1328
  58. package/dist/chunk-FHRZNOMW.js +0 -1205
  59. package/dist/chunk-IM7N24MT.js +0 -129
  60. package/dist/chunk-IPNYIXFK.js +0 -1178
  61. package/dist/chunk-J7MRSZIO.js +0 -167
  62. package/dist/chunk-JITKI2OI.js +0 -106
  63. package/dist/chunk-JWZXYVET.js +0 -1068
  64. package/dist/chunk-KCQUXVYT.js +0 -108
  65. package/dist/chunk-KOQ43OX6.js +0 -1281
  66. package/dist/chunk-KYVWO6ZM.js +0 -1069
  67. package/dist/chunk-L3VHARQH.js +0 -413
  68. package/dist/chunk-LBWDS6BE.js +0 -288
  69. package/dist/chunk-LOVAHSQV.js +0 -411
  70. package/dist/chunk-LQOYCAGG.js +0 -446
  71. package/dist/chunk-LWAIPJ2W.js +0 -146
  72. package/dist/chunk-M5DHKW7M.js +0 -127
  73. package/dist/chunk-MBKCIJHM.js +0 -1328
  74. package/dist/chunk-MK42FMEG.js +0 -147
  75. package/dist/chunk-MOBMYHKL.js +0 -1205
  76. package/dist/chunk-MPLTNMRG.js +0 -67
  77. package/dist/chunk-NIJCVN3O.js +0 -147
  78. package/dist/chunk-NX647OM3.js +0 -310
  79. package/dist/chunk-NZCFSZQ7.js +0 -1205
  80. package/dist/chunk-O2UITJGH.js +0 -465
  81. package/dist/chunk-OCRA44AZ.js +0 -108
  82. package/dist/chunk-P3KJR66H.js +0 -117
  83. package/dist/chunk-PEK6JH65.js +0 -432
  84. package/dist/chunk-PJ6FFKEX.js +0 -77
  85. package/dist/chunk-PLUBBZYR.js +0 -800
  86. package/dist/chunk-PNKVD2UK.js +0 -26
  87. package/dist/chunk-PSQZURHO.js +0 -229
  88. package/dist/chunk-SGL6ISBJ.js +0 -1061
  89. package/dist/chunk-SJABZZT5.js +0 -97
  90. package/dist/chunk-TD3P3K32.js +0 -1199
  91. package/dist/chunk-TMDZJJKV.js +0 -288
  92. package/dist/chunk-UNHVZB5G.js +0 -411
  93. package/dist/chunk-VAFTWSTE.js +0 -1061
  94. package/dist/chunk-VNFXFQBB.js +0 -217
  95. package/dist/chunk-X3GVFKSJ.js +0 -1205
  96. package/dist/chunk-XZ3S56RQ.js +0 -1061
  97. package/dist/chunk-Y72C7F6O.js +0 -148
  98. package/dist/chunk-YLICP577.js +0 -1205
  99. package/dist/chunk-YX6AXLVK.js +0 -159
  100. package/dist/chunk-ZCQYHTNU.js +0 -146
  101. package/dist/cloud-crystal.js +0 -6
  102. package/dist/dev-update-SZ2Z4WCQ.js +0 -6
  103. package/dist/llm-XXLYPIOF.js +0 -16
  104. package/dist/mlx-setup-XKU67WCT.js +0 -289
  105. package/dist/search-pipeline-4K4OJSSS.js +0 -255
  106. package/dist/search-pipeline-4PRS6LI7.js +0 -280
  107. package/dist/search-pipeline-7UJMXPLO.js +0 -280
  108. package/dist/search-pipeline-CBV25NX7.js +0 -99
  109. package/dist/search-pipeline-DQTRLGBH.js +0 -74
  110. package/dist/search-pipeline-HNG37REH.js +0 -282
  111. package/dist/search-pipeline-IZFPLBUB.js +0 -280
  112. package/dist/search-pipeline-MID6F26Q.js +0 -73
  113. package/dist/search-pipeline-N52JZFNN.js +0 -282
  114. package/dist/search-pipeline-OPB2PRQQ.js +0 -280
  115. package/dist/search-pipeline-VXTE5HAD.js +0 -262
  116. package/dist/search-pipeline-XHFKADRG.js +0 -73
  117. package/dist/worker-demo.js +0 -186
  118. package/dist/worker-mcp.js +0 -404
  119. package/scripts/crystal-capture 2.sh +0 -29
  120. package/scripts/deploy-cloud 2.sh +0 -153
@@ -1,67 +0,0 @@
1
- // src/bridge.ts
2
- import { existsSync, readFileSync, writeFileSync } from "fs";
3
- import { execSync } from "child_process";
4
- import { join } from "path";
5
- var HOME = process.env.HOME || "";
6
- function _checkLocalBridge() {
7
- if (existsSync(join(HOME, ".openclaw", "extensions", "lesa-bridge", "dist", "index.js"))) return true;
8
- if (existsSync(join(HOME, ".ldm", "extensions", "lesa-bridge", "dist", "index.js"))) return true;
9
- return false;
10
- }
11
- function isBridgeInstalled() {
12
- try {
13
- execSync("which lesa 2>/dev/null", { encoding: "utf-8" });
14
- return true;
15
- } catch {
16
- return _checkLocalBridge();
17
- }
18
- }
19
- function isBridgeRegistered() {
20
- const mcpPath = join(HOME, ".claude", ".mcp.json");
21
- try {
22
- if (existsSync(mcpPath)) {
23
- const config = JSON.parse(readFileSync(mcpPath, "utf-8"));
24
- if (config.mcpServers && config.mcpServers["lesa-bridge"]) return true;
25
- }
26
- } catch {
27
- }
28
- return false;
29
- }
30
- function isBridgeDesktopRegistered() {
31
- const desktopConfig = join(HOME, "Library", "Application Support", "Claude", "claude_desktop_config.json");
32
- try {
33
- if (existsSync(desktopConfig)) {
34
- const config = JSON.parse(readFileSync(desktopConfig, "utf-8"));
35
- if (config.mcpServers && config.mcpServers["lesa-bridge"]) return true;
36
- }
37
- } catch {
38
- }
39
- return false;
40
- }
41
- function registerBridgeMcp() {
42
- execSync("claude mcp add --scope user lesa-bridge -- lesa", {
43
- encoding: "utf-8",
44
- stdio: "pipe"
45
- });
46
- }
47
- function registerBridgeDesktop() {
48
- const desktopConfig = join(HOME, "Library", "Application Support", "Claude", "claude_desktop_config.json");
49
- if (!existsSync(desktopConfig)) return false;
50
- try {
51
- const config = JSON.parse(readFileSync(desktopConfig, "utf-8"));
52
- if (!config.mcpServers) config.mcpServers = {};
53
- config.mcpServers["lesa-bridge"] = { command: "lesa" };
54
- writeFileSync(desktopConfig, JSON.stringify(config, null, 2) + "\n");
55
- return true;
56
- } catch {
57
- return false;
58
- }
59
- }
60
-
61
- export {
62
- isBridgeInstalled,
63
- isBridgeRegistered,
64
- isBridgeDesktopRegistered,
65
- registerBridgeMcp,
66
- registerBridgeDesktop
67
- };
@@ -1,147 +0,0 @@
1
- // src/dev-update.ts
2
- import { execSync } from "child_process";
3
- import { existsSync, mkdirSync, writeFileSync, readFileSync } from "fs";
4
- import { join, basename } from "path";
5
- var HOME = process.env.HOME || "/Users/lesa";
6
- var STAFF_DIR = join(HOME, "Documents", "wipcomputer--mac-mini-01", "staff");
7
- var CC_REPOS = join(STAFF_DIR, "Parker", "Claude Code - Mini", "repos");
8
- var LESA_REPOS = join(STAFF_DIR, "L\u0113sa", "repos");
9
- var DEV_UPDATES_DIR = join(CC_REPOS, "wip-dev-updates");
10
- var LAST_RUN_PATH = join(HOME, ".openclaw", "memory", "dev-update-last-run.json");
11
- function loadLastRun() {
12
- try {
13
- if (existsSync(LAST_RUN_PATH)) {
14
- return JSON.parse(readFileSync(LAST_RUN_PATH, "utf-8"));
15
- }
16
- } catch {
17
- }
18
- return null;
19
- }
20
- function saveLastRun(run) {
21
- const dir = join(HOME, ".openclaw", "memory");
22
- if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
23
- writeFileSync(LAST_RUN_PATH, JSON.stringify(run, null, 2));
24
- }
25
- function git(repoPath, cmd) {
26
- try {
27
- return execSync(`git -C "${repoPath}" ${cmd}`, {
28
- encoding: "utf-8",
29
- timeout: 1e4,
30
- stdio: ["pipe", "pipe", "pipe"]
31
- }).trim();
32
- } catch {
33
- return "";
34
- }
35
- }
36
- function scanRepo(repoPath, since) {
37
- if (!existsSync(join(repoPath, ".git"))) return null;
38
- const name = basename(repoPath);
39
- if (name === "_third-party-repos" || name === "wip-dev-updates") return null;
40
- const recentCommits = git(repoPath, `log --oneline --since="${since}"`);
41
- const uncommitted = git(repoPath, "status --porcelain");
42
- if (!recentCommits && !uncommitted) return null;
43
- const lines = [];
44
- lines.push(`# ${name}`);
45
- lines.push("");
46
- if (recentCommits) {
47
- lines.push("## Recent Commits");
48
- lines.push("");
49
- lines.push("```");
50
- lines.push(...recentCommits.split("\n").slice(0, 10));
51
- lines.push("```");
52
- lines.push("");
53
- }
54
- if (uncommitted) {
55
- lines.push("## Uncommitted Changes");
56
- lines.push("");
57
- lines.push("```");
58
- lines.push(...uncommitted.split("\n").slice(0, 20));
59
- lines.push("```");
60
- lines.push("");
61
- }
62
- if (recentCommits) {
63
- const diffStat = git(repoPath, `diff --stat "HEAD@{${since}}" HEAD`);
64
- if (diffStat) {
65
- lines.push("## Files Changed");
66
- lines.push("");
67
- lines.push("```");
68
- lines.push(...diffStat.split("\n").slice(-15));
69
- lines.push("```");
70
- lines.push("");
71
- }
72
- }
73
- const branch = git(repoPath, "branch --show-current") || "unknown";
74
- lines.push(`**Branch:** ${branch}`);
75
- lines.push("");
76
- return lines.join("\n");
77
- }
78
- function runDevUpdate(author) {
79
- const lastRun = loadLastRun();
80
- if (lastRun) {
81
- const elapsed = Date.now() - new Date(lastRun.timestamp).getTime();
82
- if (elapsed < 60 * 60 * 1e3) {
83
- return { reposUpdated: 0, files: [] };
84
- }
85
- }
86
- let since = "6 hours ago";
87
- if (lastRun?.timestamp) {
88
- const lastDate = new Date(lastRun.timestamp);
89
- const hoursAgo = Math.ceil((Date.now() - lastDate.getTime()) / (1e3 * 60 * 60));
90
- since = `${Math.max(hoursAgo, 1)} hours ago`;
91
- }
92
- const now = /* @__PURE__ */ new Date();
93
- const ts = [
94
- String(now.getMonth() + 1).padStart(2, "0"),
95
- String(now.getDate()).padStart(2, "0"),
96
- String(now.getFullYear())
97
- ].join("-") + "--" + [
98
- String(now.getHours()).padStart(2, "0"),
99
- String(now.getMinutes()).padStart(2, "0"),
100
- String(now.getSeconds()).padStart(2, "0")
101
- ].join("-");
102
- const files = [];
103
- const repoDirs = [CC_REPOS, LESA_REPOS];
104
- for (const parentDir of repoDirs) {
105
- if (!existsSync(parentDir)) continue;
106
- let entries;
107
- try {
108
- entries = execSync(`ls "${parentDir}"`, { encoding: "utf-8" }).trim().split("\n");
109
- } catch {
110
- continue;
111
- }
112
- for (const entry of entries) {
113
- const repoPath = join(parentDir, entry);
114
- const content = scanRepo(repoPath, since);
115
- if (!content) continue;
116
- const repoName = basename(repoPath);
117
- const outDir = join(DEV_UPDATES_DIR, repoName);
118
- const outFile = join(outDir, `${author}-dev-update-${ts}.md`);
119
- mkdirSync(outDir, { recursive: true });
120
- const header = `*Auto-generated dev update by ${author} at ${now.toISOString().slice(0, 16).replace("T", " ")}*
121
-
122
- `;
123
- writeFileSync(outFile, content.replace(/^# .+\n/, `$&
124
- ${header}`));
125
- files.push(`${repoName}/${author}-dev-update-${ts}.md`);
126
- }
127
- }
128
- if (files.length > 0 && existsSync(join(DEV_UPDATES_DIR, ".git"))) {
129
- try {
130
- execSync(
131
- `cd "${DEV_UPDATES_DIR}" && git add -A && git commit -m "${author} auto-dev-update ${ts}: ${files.length} repo(s)" --no-verify && git push --quiet`,
132
- { encoding: "utf-8", timeout: 3e4, stdio: "pipe" }
133
- );
134
- } catch {
135
- }
136
- }
137
- saveLastRun({
138
- timestamp: now.toISOString(),
139
- author,
140
- reposUpdated: files.length
141
- });
142
- return { reposUpdated: files.length, files };
143
- }
144
-
145
- export {
146
- runDevUpdate
147
- };
@@ -1,310 +0,0 @@
1
- // src/llm.ts
2
- import { existsSync, readFileSync } from "fs";
3
- import { join } from "path";
4
- import { homedir } from "os";
5
- import { execSync } from "child_process";
6
- var samplingServer = null;
7
- function setSamplingServer(server) {
8
- samplingServer = server;
9
- }
10
- function hasSampling() {
11
- return samplingServer !== null;
12
- }
13
- var expansionCache = /* @__PURE__ */ new Map();
14
- var _cacheDb = null;
15
- var CACHE_TTL_DAYS = parseInt(process.env.CRYSTAL_CACHE_TTL_DAYS || "7", 10);
16
- function setLLMCacheDb(db) {
17
- _cacheDb = db;
18
- }
19
- function dbCacheGet(key) {
20
- if (!_cacheDb) return null;
21
- try {
22
- const row = _cacheDb.prepare(
23
- "SELECT result FROM llm_cache WHERE cache_key = ? AND created_at > ?"
24
- ).get(key, new Date(Date.now() - CACHE_TTL_DAYS * 864e5).toISOString());
25
- if (row) {
26
- _cacheDb.prepare("UPDATE llm_cache SET hit_count = hit_count + 1, last_hit_at = ? WHERE cache_key = ?").run((/* @__PURE__ */ new Date()).toISOString(), key);
27
- return row.result;
28
- }
29
- } catch {
30
- }
31
- return null;
32
- }
33
- function dbCacheSet(key, type, query, intent, result, provider) {
34
- if (!_cacheDb) return;
35
- try {
36
- _cacheDb.prepare(
37
- "INSERT OR REPLACE INTO llm_cache (cache_key, cache_type, query, intent, result, provider, created_at, hit_count, last_hit_at) VALUES (?, ?, ?, ?, ?, ?, ?, 0, NULL)"
38
- ).run(key, type, query, intent || null, result, provider, (/* @__PURE__ */ new Date()).toISOString());
39
- } catch {
40
- }
41
- }
42
- var detectedProvider = null;
43
- var detectionDone = false;
44
- function getOpSecret(itemName, fieldLabel) {
45
- try {
46
- const saTokenPath = join(homedir(), ".openclaw/secrets/op-sa-token");
47
- if (!existsSync(saTokenPath)) return void 0;
48
- const saToken = readFileSync(saTokenPath, "utf-8").trim();
49
- const result = execSync(
50
- `OP_SERVICE_ACCOUNT_TOKEN="${saToken}" op item get "${itemName}" --vault "Agent Secrets" --fields "${fieldLabel}" --reveal`,
51
- { encoding: "utf-8", timeout: 5e3, stdio: ["pipe", "pipe", "pipe"] }
52
- ).trim();
53
- return result || void 0;
54
- } catch {
55
- return void 0;
56
- }
57
- }
58
- async function detectProvider() {
59
- if (detectionDone && detectedProvider) return detectedProvider;
60
- detectionDone = true;
61
- if (samplingServer) {
62
- detectedProvider = { provider: "sampling", baseURL: "", apiKey: "", model: "client-selected" };
63
- process.stderr.write("[memory-crystal] LLM provider: MCP Sampling (via client)\n");
64
- return detectedProvider;
65
- }
66
- try {
67
- const resp = await fetch("http://localhost:18791/v1/models", { signal: AbortSignal.timeout(1e3) });
68
- if (resp.ok) {
69
- const data = await resp.json();
70
- const model = data?.data?.[0]?.id || "default";
71
- detectedProvider = { provider: "mlx", baseURL: "http://localhost:18791/v1", apiKey: "not-needed", model };
72
- process.stderr.write(`[memory-crystal] LLM provider: MLX (${model})
73
- `);
74
- return detectedProvider;
75
- }
76
- } catch {
77
- }
78
- try {
79
- const resp = await fetch("http://localhost:11434/api/tags", { signal: AbortSignal.timeout(1e3) });
80
- if (resp.ok) {
81
- const data = await resp.json();
82
- const models = data?.models || [];
83
- const embeddingOnly = ["nomic-embed-text", "mxbai-embed", "all-minilm", "snowflake-arctic-embed"];
84
- const chatModel = models.find((m) => !embeddingOnly.some((e) => m.name.startsWith(e)));
85
- if (chatModel) {
86
- detectedProvider = { provider: "ollama", baseURL: "http://localhost:11434/v1", apiKey: "ollama", model: chatModel.name };
87
- process.stderr.write(`[memory-crystal] LLM provider: Ollama (${chatModel.name})
88
- `);
89
- return detectedProvider;
90
- }
91
- }
92
- } catch {
93
- }
94
- const openaiKey = process.env.OPENAI_API_KEY || getOpSecret("OpenAI API", "api key");
95
- if (openaiKey) {
96
- detectedProvider = { provider: "openai", baseURL: "https://api.openai.com/v1", apiKey: openaiKey, model: "gpt-4o-mini" };
97
- process.stderr.write("[memory-crystal] LLM provider: OpenAI API\n");
98
- return detectedProvider;
99
- }
100
- const anthropicKey = process.env.ANTHROPIC_API_KEY || getOpSecret("Anthropic Auth Token - remote bunkers", "Auth Token");
101
- if (anthropicKey && !anthropicKey.startsWith("sk-ant-oat")) {
102
- detectedProvider = { provider: "anthropic", baseURL: "https://api.anthropic.com", apiKey: anthropicKey, model: "claude-haiku-4-5-20251001" };
103
- process.stderr.write("[memory-crystal] LLM provider: Anthropic API\n");
104
- return detectedProvider;
105
- }
106
- detectedProvider = { provider: "none", baseURL: "", apiKey: "", model: "" };
107
- process.stderr.write("[memory-crystal] LLM provider: none (deep search unavailable)\n");
108
- return detectedProvider;
109
- }
110
- async function chatComplete(config, messages, maxTokens = 300) {
111
- if (config.provider === "sampling") {
112
- return samplingComplete(messages, maxTokens);
113
- }
114
- if (config.provider === "anthropic") {
115
- return anthropicComplete(config, messages, maxTokens);
116
- }
117
- const resp = await fetch(`${config.baseURL}/chat/completions`, {
118
- method: "POST",
119
- headers: {
120
- "Content-Type": "application/json",
121
- "Authorization": `Bearer ${config.apiKey}`
122
- },
123
- body: JSON.stringify({
124
- model: config.model,
125
- messages,
126
- max_tokens: maxTokens,
127
- temperature: 0.7
128
- })
129
- });
130
- if (!resp.ok) throw new Error(`LLM request failed: ${resp.status}`);
131
- const data = await resp.json();
132
- return data.choices?.[0]?.message?.content || "";
133
- }
134
- async function anthropicComplete(config, messages, maxTokens) {
135
- const systemMsg = messages.find((m) => m.role === "system");
136
- const userMessages = messages.filter((m) => m.role !== "system");
137
- const body = {
138
- model: config.model,
139
- max_tokens: maxTokens,
140
- messages: userMessages
141
- };
142
- if (systemMsg) body.system = systemMsg.content;
143
- const resp = await fetch("https://api.anthropic.com/v1/messages", {
144
- method: "POST",
145
- headers: {
146
- "Content-Type": "application/json",
147
- "x-api-key": config.apiKey,
148
- "anthropic-version": "2023-06-01"
149
- },
150
- body: JSON.stringify(body)
151
- });
152
- if (!resp.ok) throw new Error(`Anthropic request failed: ${resp.status}`);
153
- const data = await resp.json();
154
- return data.content?.[0]?.text || "";
155
- }
156
- async function samplingComplete(messages, maxTokens) {
157
- if (!samplingServer) throw new Error("MCP sampling server not set");
158
- const systemMsg = messages.find((m) => m.role === "system");
159
- const userMessages = messages.filter((m) => m.role !== "system");
160
- const result = await samplingServer.createMessage({
161
- messages: userMessages.map((m) => ({
162
- role: m.role,
163
- content: { type: "text", text: m.content }
164
- })),
165
- systemPrompt: systemMsg?.content,
166
- maxTokens,
167
- modelPreferences: {
168
- // Request cheap, fast model (Haiku-class). We don't need Opus for query expansion.
169
- costPriority: 0.9,
170
- speedPriority: 0.8,
171
- intelligencePriority: 0.3,
172
- hints: [{ name: "haiku" }]
173
- }
174
- });
175
- if (result?.content?.type === "text") return result.content.text;
176
- if (typeof result?.content === "string") return result.content;
177
- return "";
178
- }
179
- var EXPAND_PROMPT = `You are a search query expander. Given a search query, generate exactly 3 variations to improve search recall.
180
-
181
- Output exactly 3 lines in this format (no other text):
182
- lex: <keyword-focused variation for full-text search>
183
- vec: <semantic variation rephrased for embedding similarity>
184
- hyde: <hypothetical document snippet that would answer this query>
185
-
186
- Rules:
187
- - Each variation must contain at least one term from the original query
188
- - Keep variations concise (under 30 words each)
189
- - lex should use specific keywords and synonyms
190
- - vec should rephrase the intent naturally
191
- - hyde should be a short passage as if answering the query`;
192
- async function expandQuery(query, intent) {
193
- const cacheKey = intent ? `expand:${query}||${intent}` : `expand:${query}`;
194
- const dbCached = dbCacheGet(cacheKey);
195
- if (dbCached) {
196
- try {
197
- return JSON.parse(dbCached);
198
- } catch {
199
- }
200
- }
201
- const cached = expansionCache.get(cacheKey);
202
- if (cached) return cached;
203
- const config = await detectProvider();
204
- if (config.provider === "none") return [];
205
- try {
206
- const intentContext = intent ? `
207
- Query intent: ${intent}. Use this to guide your variations toward the intended domain.` : "";
208
- const result = await chatComplete(config, [
209
- { role: "system", content: EXPAND_PROMPT + intentContext },
210
- { role: "user", content: query }
211
- ], 300);
212
- const lines = result.trim().split("\n");
213
- const queryLower = query.toLowerCase();
214
- const queryTerms = queryLower.replace(/[^a-z0-9\s]/g, " ").split(/\s+/).filter(Boolean);
215
- const hasQueryTerm = (text) => {
216
- const lower = text.toLowerCase();
217
- if (queryTerms.length === 0) return true;
218
- return queryTerms.some((term) => lower.includes(term));
219
- };
220
- const variations = lines.map((line) => {
221
- const colonIdx = line.indexOf(":");
222
- if (colonIdx === -1) return null;
223
- const type = line.slice(0, colonIdx).trim();
224
- if (type !== "lex" && type !== "vec" && type !== "hyde") return null;
225
- const text = line.slice(colonIdx + 1).trim();
226
- if (!text || !hasQueryTerm(text)) return null;
227
- return { type, text };
228
- }).filter((v) => v !== null);
229
- if (variations.length > 0) {
230
- expansionCache.set(cacheKey, variations);
231
- dbCacheSet(cacheKey, "expansion", query, intent, JSON.stringify(variations), config.provider);
232
- return variations;
233
- }
234
- } catch (err) {
235
- process.stderr.write(`[memory-crystal] Query expansion failed: ${err.message}
236
- `);
237
- }
238
- const fallback = [
239
- { type: "lex", text: query },
240
- { type: "vec", text: query },
241
- { type: "hyde", text: `Information about ${query}` }
242
- ];
243
- return fallback;
244
- }
245
- var RERANK_PROMPT = `You are a search result re-ranker. Given a query and a list of text passages, rate each passage's relevance to the query.
246
-
247
- Output one line per passage in this exact format:
248
- <index>: <score>
249
-
250
- Where index is the passage number (0-based) and score is a float from 0.0 to 1.0.
251
- - 1.0 = perfectly relevant, directly answers the query
252
- - 0.7 = highly relevant, closely related
253
- - 0.4 = somewhat relevant, tangentially related
254
- - 0.1 = barely relevant
255
- - 0.0 = not relevant at all
256
-
257
- Rate ALL passages. Output nothing else.`;
258
- async function rerankResults(query, passages) {
259
- const config = await detectProvider();
260
- if (config.provider === "none") {
261
- return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
262
- }
263
- const { createHash } = await import("crypto");
264
- const contentHash = createHash("sha256").update(passages.map((p) => p.slice(0, 200)).sort().join("|")).digest("hex").slice(0, 16);
265
- const rerankCacheKey = `rerank:${query}||${contentHash}`;
266
- const dbCachedRerank = dbCacheGet(rerankCacheKey);
267
- if (dbCachedRerank) {
268
- try {
269
- return JSON.parse(dbCachedRerank);
270
- } catch {
271
- }
272
- }
273
- try {
274
- const passageList = passages.map((p, i) => `[${i}] ${p.slice(0, 500)}`).join("\n\n");
275
- const result = await chatComplete(config, [
276
- { role: "system", content: RERANK_PROMPT },
277
- { role: "user", content: `Query: ${query}
278
-
279
- Passages:
280
- ${passageList}` }
281
- ], 200);
282
- const results = [];
283
- for (const line of result.trim().split("\n")) {
284
- const match = line.match(/^(\d+):\s*([\d.]+)/);
285
- if (match) {
286
- results.push({ index: parseInt(match[1]), score: parseFloat(match[2]) });
287
- }
288
- }
289
- const scored = new Set(results.map((r) => r.index));
290
- for (let i = 0; i < passages.length; i++) {
291
- if (!scored.has(i)) results.push({ index: i, score: 0 });
292
- }
293
- const sorted = results.sort((a, b) => b.score - a.score);
294
- dbCacheSet(rerankCacheKey, "rerank", query, void 0, JSON.stringify(sorted), config.provider);
295
- return sorted;
296
- } catch (err) {
297
- process.stderr.write(`[memory-crystal] Reranking failed: ${err.message}
298
- `);
299
- return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
300
- }
301
- }
302
-
303
- export {
304
- setSamplingServer,
305
- hasSampling,
306
- setLLMCacheDb,
307
- detectProvider,
308
- expandQuery,
309
- rerankResults
310
- };