@wipcomputer/memory-crystal 0.7.30 → 0.7.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/SKILL.md +1 -1
- package/cloud/wrangler.toml +30 -0
- package/dist/bulk-copy.js +1 -1
- package/dist/cc-hook.js +3 -3
- package/dist/cc-poller.js +2 -2
- package/dist/chunk-2GBYLMEF.js +1385 -0
- package/dist/chunk-437F27T6.js +97 -0
- package/dist/chunk-5I7GMRDN.js +146 -0
- package/dist/chunk-CGIDSAJB.js +288 -0
- package/dist/chunk-D3MACYZ4.js +108 -0
- package/dist/chunk-DFQ72B7M.js +248 -0
- package/dist/chunk-NX647OM3.js +310 -0
- package/dist/cli.js +62 -7
- package/dist/core.d.ts +22 -2
- package/dist/core.js +1 -1
- package/dist/crypto.js +2 -2
- package/dist/crystal-serve.js +3 -3
- package/dist/doctor.js +12 -4
- package/dist/dream-weaver.js +2 -2
- package/dist/file-sync.js +3 -3
- package/dist/installer.js +99 -3
- package/dist/ldm.js +1 -1
- package/dist/llm-XXLYPIOF.js +16 -0
- package/dist/mcp-server.js +17 -5
- package/dist/migrate.js +1 -1
- package/dist/mirror-sync.js +4 -4
- package/dist/mlx-setup-XKU67WCT.js +289 -0
- package/dist/openclaw.js +16 -5
- package/dist/pair.js +2 -2
- package/dist/poller.js +5 -5
- package/dist/role.js +2 -2
- package/dist/search-pipeline-CBV25NX7.js +99 -0
- package/dist/staging.js +2 -2
- package/package.json +15 -1
- package/.env.example +0 -20
- package/.publish-skill.json +0 -1
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/.env.example +0 -20
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/.publish-skill.json +0 -1
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/CHANGELOG.md +0 -1297
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/CLA.md +0 -19
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/LICENSE +0 -52
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/README-ENTERPRISE.md +0 -226
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/README.md +0 -151
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/RELAY.md +0 -199
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/SKILL.md +0 -462
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/TECHNICAL.md +0 -656
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-23.md +0 -48
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-25.md +0 -24
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-26.md +0 -7
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-28.md +0 -31
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-29.md +0 -28
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-4.md +0 -64
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/_trash/RELEASE-NOTES-v0-7-5.md +0 -19
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/cloud/README.md +0 -116
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/cloud/docs/gpt-system-instructions.md +0 -69
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/cloud/migrations/0001_init.sql +0 -52
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/migrations/0001_init.sql +0 -51
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/migrations/0002_cloud_storage.sql +0 -49
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/openclaw.plugin.json +0 -11
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/package-lock.json +0 -4169
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/package.json +0 -61
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/scripts/crystal-capture.sh +0 -29
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/scripts/deploy-cloud.sh +0 -153
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/scripts/ldm-backup.sh +0 -116
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/scripts/migrate-lance-to-sqlite.mjs +0 -218
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/skills/memory/SKILL.md +0 -438
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/wrangler-demo.toml +0 -8
- package/.worktrees/memory-crystal-private--cc-mini-fix-home-fallback/wrangler-mcp.toml +0 -24
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/.env.example +0 -20
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/.publish-skill.json +0 -1
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/CHANGELOG.md +0 -1297
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/CLA.md +0 -19
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/LICENSE +0 -52
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/README-ENTERPRISE.md +0 -226
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/README.md +0 -151
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/RELAY.md +0 -199
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/RELEASE-NOTES-v0.7.30.md +0 -29
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/SKILL.md +0 -462
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/TECHNICAL.md +0 -656
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-23.md +0 -48
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-25.md +0 -24
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-26.md +0 -7
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-28.md +0 -31
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-29.md +0 -28
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-4.md +0 -64
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/_trash/RELEASE-NOTES-v0-7-5.md +0 -19
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/cloud/README.md +0 -116
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/cloud/docs/gpt-system-instructions.md +0 -69
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/cloud/migrations/0001_init.sql +0 -52
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/migrations/0001_init.sql +0 -51
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/migrations/0002_cloud_storage.sql +0 -49
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/openclaw.plugin.json +0 -11
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/package-lock.json +0 -4169
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/package.json +0 -61
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/scripts/crystal-capture.sh +0 -29
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/scripts/deploy-cloud.sh +0 -153
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/scripts/ldm-backup.sh +0 -116
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/scripts/migrate-lance-to-sqlite.mjs +0 -218
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/skills/memory/SKILL.md +0 -438
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/wrangler-demo.toml +0 -8
- package/.worktrees/memory-crystal-private--cc-mini-release-notes-v0.7.30/wrangler-mcp.toml +0 -24
- package/CHANGELOG.md +0 -1329
- package/README-ENTERPRISE.md +0 -226
- package/RELAY.md +0 -199
- package/_trash/RELEASE-NOTES-v0-7-23.md +0 -48
- package/_trash/RELEASE-NOTES-v0-7-25.md +0 -24
- package/_trash/RELEASE-NOTES-v0-7-26.md +0 -7
- package/_trash/RELEASE-NOTES-v0-7-28.md +0 -31
- package/_trash/RELEASE-NOTES-v0-7-29.md +0 -28
- package/_trash/RELEASE-NOTES-v0-7-4.md +0 -64
- package/_trash/RELEASE-NOTES-v0-7-5.md +0 -19
- package/_trash/RELEASE-NOTES-v0.7.30.md +0 -29
- package/wrangler-demo.toml +0 -8
- package/wrangler-mcp.toml +0 -24
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
// src/ldm.ts
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync, copyFileSync, chmodSync, readdirSync } from "fs";
import { join, dirname } from "path";
import { execSync } from "child_process";
import { fileURLToPath } from "url";
import { homedir } from "os";
|
|
6
|
+
// Resolve the user's home directory. Fall back to os.homedir() when
// $HOME is unset (daemon contexts, some Windows shells) — the previous
// "" fallback made every LDM path resolve relative to the CWD.
var HOME = process.env.HOME || homedir();
// Root of the on-disk LDM layout (~/.ldm).
var LDM_ROOT = join(HOME, ".ldm");
|
|
8
|
+
// Read the per-agent config (~/.ldm/agents/<id>/config.json).
// Returns the parsed object, or null when missing or unparsable.
function loadAgentConfig(id) {
  const configFile = join(LDM_ROOT, "agents", id, "config.json");
  if (!existsSync(configFile)) return null;
  try {
    const raw = readFileSync(configFile, "utf-8");
    return JSON.parse(raw);
  } catch {
    // Corrupt or unreadable config — treat as absent.
    return null;
  }
}
|
|
16
|
+
// Persist a per-agent config, creating the agent directory if needed.
function saveAgentConfig(id, config) {
  const agentDir = join(LDM_ROOT, "agents", id);
  mkdirSync(agentDir, { recursive: true });
  const serialized = `${JSON.stringify(config, null, 2)}\n`;
  writeFileSync(join(agentDir, "config.json"), serialized);
}
|
|
21
|
+
// Resolve the active agent id.
// Precedence: CRYSTAL_AGENT_ID env var → first on-disk agent config
// matching the harness hint → hard-coded default for the harness.
function getAgentId(harnessHint) {
  const envId = process.env.CRYSTAL_AGENT_ID;
  if (envId) return envId;
  const agentsDir = join(LDM_ROOT, "agents");
  if (existsSync(agentsDir)) {
    // Translate the caller's hint into the harness value stored in configs.
    const wantedHarness =
      harnessHint === "claude-code" ? "claude-code-cli" :
      harnessHint === "openclaw" ? "openclaw" : null;
    try {
      for (const entry of readdirSync(agentsDir)) {
        const cfg = loadAgentConfig(entry);
        if (!cfg?.agentId) continue;
        if (!harnessHint) return cfg.agentId;
        if (wantedHarness && cfg.harness === wantedHarness) return cfg.agentId;
      }
    } catch {
      // Unreadable agents dir — fall through to the default id.
    }
  }
  return harnessHint === "openclaw" ? "oc-lesa-mini" : "cc-mini";
}
|
|
38
|
+
// Compute every filesystem location used by the LDM layout.
// Shared locations live directly under ~/.ldm; per-agent memory lives
// under ~/.ldm/agents/<id>/memory.
function ldmPaths(agentId) {
  const id = agentId || getAgentId();
  const agentRoot = join(LDM_ROOT, "agents", id);
  const agentMemory = join(agentRoot, "memory");
  return {
    root: LDM_ROOT,
    bin: join(LDM_ROOT, "bin"),
    secrets: join(LDM_ROOT, "secrets"),
    state: join(LDM_ROOT, "state"),
    config: join(LDM_ROOT, "config.json"),
    crystalDb: join(LDM_ROOT, "memory", "crystal.db"),
    crystalLance: join(LDM_ROOT, "memory", "lance"),
    agentRoot,
    transcripts: join(agentMemory, "transcripts"),
    sessions: join(agentMemory, "sessions"),
    daily: join(agentMemory, "daily"),
    journals: join(agentMemory, "journals"),
    workspace: join(agentMemory, "workspace")
  };
}
|
|
57
|
+
// Read the global LDM config (~/.ldm/config.json).
// Returns the parsed object, or null when missing or unparsable.
function loadConfig() {
  const configPath = join(LDM_ROOT, "config.json");
  if (!existsSync(configPath)) return null;
  try {
    return JSON.parse(readFileSync(configPath, "utf-8"));
  } catch {
    // Corrupt config — treat as absent so callers can rebuild it.
    return null;
  }
}
|
|
67
|
+
// Write the global LDM config (~/.ldm/config.json), pretty-printed.
function saveConfig(config) {
  const serialized = `${JSON.stringify(config, null, 2)}\n`;
  writeFileSync(join(LDM_ROOT, "config.json"), serialized);
}
|
|
71
|
+
// Create the full on-disk LDM layout for an agent and register the
// agent in the global config. Idempotent: safe to re-run.
// Returns the resolved path set for the agent.
function scaffoldLdm(agentId) {
  const paths = ldmPaths(agentId);
  mkdirSync(join(paths.root, "memory"), { recursive: true });
  mkdirSync(paths.crystalLance, { recursive: true });
  mkdirSync(paths.bin, { recursive: true });
  // Secrets directory is owner-only (0o700 === 448 decimal).
  mkdirSync(paths.secrets, { recursive: true, mode: 0o700 });
  for (const dir of [paths.state, paths.transcripts, paths.sessions, paths.daily, paths.journals, paths.workspace]) {
    mkdirSync(dir, { recursive: true });
  }
  const id = agentId || getAgentId();
  const now = new Date().toISOString();
  let config = loadConfig();
  if (!config) {
    config = {
      version: "1.0.0",
      agents: [id],
      createdAt: now,
      updatedAt: now
    };
  } else {
    // Defensive: a hand-edited config may lack the agents array; the
    // previous code threw TypeError on config.agents.includes here.
    if (!Array.isArray(config.agents)) config.agents = [];
    if (!config.agents.includes(id)) config.agents.push(id);
    config.updatedAt = now;
  }
  saveConfig(config);
  return paths;
}
|
|
101
|
+
// Copy crystal-capture.sh from the package into ~/.ldm/bin and make it
// executable. Throws when the script cannot be found in the package.
function deployCaptureScript() {
  const paths = ldmPaths();
  mkdirSync(paths.bin, { recursive: true });
  const packageDir = dirname(fileURLToPath(import.meta.url));
  // Prefer a script colocated with the bundle, else the scripts/ folder.
  const candidates = [
    join(packageDir, "crystal-capture.sh"),
    join(packageDir, "..", "scripts", "crystal-capture.sh")
  ];
  const source = candidates.find((p) => existsSync(p));
  if (!source) {
    throw new Error(`crystal-capture.sh not found at ${candidates[1]}`);
  }
  const destination = join(paths.bin, "crystal-capture.sh");
  copyFileSync(source, destination);
  chmodSync(destination, 0o755); // rwxr-xr-x (493 decimal)
  return destination;
}
|
|
117
|
+
// Marker comment written above our crontab entry so it can be found later.
var CRON_TAG = "# crystal-capture";
// Run the capture script every minute, appending output to the LDM log.
var CRON_ENTRY = "* * * * * ~/.ldm/bin/crystal-capture.sh >> ~/.ldm/logs/crystal-capture.log 2>&1";
// True for lines installCron/removeCron own: the exact tag comment, or an
// active (starts with "*") cron entry invoking crystal-capture.sh.
function isCrystalCaptureLine(line) {
  if (line === CRON_TAG) return true;
  return line.startsWith("*") && line.includes("crystal-capture.sh");
}
|
|
122
|
+
// Install (or refresh) the every-minute crontab entry that runs the
// capture script, first removing any previously installed copies.
function installCron() {
  mkdirSync(join(HOME, ".ldm", "logs"), { recursive: true });
  let current = "";
  try {
    current = execSync("crontab -l 2>/dev/null", { encoding: "utf8" });
  } catch {
    // No crontab yet — start from an empty one.
  }
  const kept = current.split("\n").filter((line) => !isCrystalCaptureLine(line));
  kept.push(CRON_TAG, CRON_ENTRY);
  // Drop a trailing blank line (if any) and re-terminate with a newline.
  const trimmed = kept.filter((l, i, arr) => !(l === "" && i === arr.length - 1));
  execSync("crontab -", { input: trimmed.join("\n") + "\n", encoding: "utf8" });
}
|
|
135
|
+
// Strip all crystal-capture lines from the user's crontab.
// No-op when there is no crontab at all.
function removeCron() {
  let current;
  try {
    current = execSync("crontab -l 2>/dev/null", { encoding: "utf8" });
  } catch {
    return;
  }
  const kept = current.split("\n").filter((line) => !isCrystalCaptureLine(line));
  execSync("crontab -", { input: kept.join("\n"), encoding: "utf8" });
}
|
|
146
|
+
// Copy ldm-backup.sh from the package into ~/.ldm/bin and make it
// executable. Throws when the script cannot be found in the package.
function deployBackupScript() {
  const paths = ldmPaths();
  mkdirSync(paths.bin, { recursive: true });
  const packageDir = dirname(fileURLToPath(import.meta.url));
  // Prefer a script colocated with the bundle, else the scripts/ folder.
  const candidates = [
    join(packageDir, "ldm-backup.sh"),
    join(packageDir, "..", "scripts", "ldm-backup.sh")
  ];
  const source = candidates.find((p) => existsSync(p));
  if (!source) {
    throw new Error(`ldm-backup.sh not found at ${candidates[1]}`);
  }
  const destination = join(paths.bin, "ldm-backup.sh");
  copyFileSync(source, destination);
  chmodSync(destination, 0o755); // rwxr-xr-x (493 decimal)
  return destination;
}
|
|
162
|
+
// Install a macOS LaunchAgent that runs the deployed ldm-backup.sh
// daily at 03:00, logging to ~/.ldm/logs/ldm-backup.log.
// Requires deployBackupScript() (via crystal init) to have run first.
// Returns the path of the written plist. Throws when the backup script
// is missing or `launchctl load` fails.
function installBackupLaunchAgent() {
  const scriptPath = join(ldmPaths().bin, "ldm-backup.sh");
  if (!existsSync(scriptPath)) {
    throw new Error(`Backup script not found. Run crystal init first.`);
  }
  const launchAgentsDir = join(HOME, "Library", "LaunchAgents");
  mkdirSync(launchAgentsDir, { recursive: true });
  const plistPath = join(launchAgentsDir, "ai.openclaw.ldm-backup.plist");
  const plist = `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>ai.openclaw.ldm-backup</string>
<key>ProgramArguments</key>
<array>
<string>bash</string>
<string>${scriptPath}</string>
</array>
<key>StartCalendarInterval</key>
<dict>
<key>Hour</key>
<integer>3</integer>
<key>Minute</key>
<integer>0</integer>
</dict>
<key>StandardOutPath</key>
<string>${HOME}/.ldm/logs/ldm-backup.log</string>
<key>StandardErrorPath</key>
<string>${HOME}/.ldm/logs/ldm-backup.log</string>
</dict>
</plist>`;
  writeFileSync(plistPath, plist);
  // Unload any previously loaded copy so `load` picks up the new plist;
  // failure here just means it wasn't loaded yet.
  try {
    execSync(`launchctl unload ${plistPath} 2>/dev/null`);
  } catch {
  }
  execSync(`launchctl load ${plistPath}`);
  return plistPath;
}
|
|
202
|
+
// Pre-LDM installations kept everything under ~/.openclaw.
var LEGACY_OC_DIR = join(HOME, ".openclaw");
// Locate a state file: prefer ~/.ldm/state, fall back to the legacy
// ~/.openclaw/memory location. When neither exists, returns the
// new-layout path so the caller may create it there.
function resolveStatePath(filename) {
  const paths = ldmPaths();
  const preferred = join(paths.state, filename);
  if (existsSync(preferred)) return preferred;
  const legacy = join(LEGACY_OC_DIR, "memory", filename);
  return existsSync(legacy) ? legacy : preferred;
}
|
|
211
|
+
// Return the writable path for a state file, creating ~/.ldm/state on
// first use. Writes always target the new layout (never the legacy dir).
function stateWritePath(filename) {
  const stateDir = ldmPaths().state;
  if (!existsSync(stateDir)) mkdirSync(stateDir, { recursive: true });
  return join(stateDir, filename);
}
|
|
217
|
+
// Locate a secret file: prefer ~/.ldm/secrets, fall back to the legacy
// ~/.openclaw/secrets location. When neither exists, returns the
// new-layout path so the caller may create it there.
function resolveSecretPath(filename) {
  const paths = ldmPaths();
  const preferred = join(paths.secrets, filename);
  if (existsSync(preferred)) return preferred;
  const legacy = join(LEGACY_OC_DIR, "secrets", filename);
  return existsSync(legacy) ? legacy : preferred;
}
|
|
225
|
+
// Return the LDM path set for an agent, scaffolding the on-disk layout
// on first use. Presence of the transcripts dir and global config is
// treated as "already scaffolded".
function ensureLdm(agentId) {
  const paths = ldmPaths(agentId);
  const alreadyScaffolded = existsSync(paths.transcripts) && existsSync(paths.config);
  return alreadyScaffolded ? paths : scaffoldLdm(agentId);
}
|
|
232
|
+
|
|
233
|
+
// Public API of the LDM layout helpers (loadConfig/saveConfig stay private).
export {
  loadAgentConfig,
  saveAgentConfig,
  getAgentId,
  ldmPaths,
  scaffoldLdm,
  deployCaptureScript,
  installCron,
  removeCron,
  deployBackupScript,
  installBackupLaunchAgent,
  resolveStatePath,
  stateWritePath,
  resolveSecretPath,
  ensureLdm
};
|
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
// src/llm.ts
|
|
2
|
+
import { existsSync, readFileSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
import { homedir } from "os";
|
|
5
|
+
import { execSync } from "child_process";
|
|
6
|
+
// MCP server handle used for client-side sampling; null until wired up.
var samplingServer = null;
// Register the MCP sampling server (pass null to clear it).
function setSamplingServer(server) {
  samplingServer = server;
}
// True once a sampling server has been registered.
function hasSampling() {
  return samplingServer !== null;
}
|
|
13
|
+
// In-process cache of query expansions, keyed by "expand:<query>[||intent]".
var expansionCache = new Map();
// Optional SQLite handle backing the persistent llm_cache table.
var _cacheDb = null;
// Persistent cache entries expire after this many days (default 7).
var CACHE_TTL_DAYS = parseInt(process.env.CRYSTAL_CACHE_TTL_DAYS || "7", 10);
// Inject the database handle used by the persistent cache helpers.
function setLLMCacheDb(db) {
  _cacheDb = db;
}
// Fetch an unexpired cache row for `key`, bumping its hit counters.
// Returns the cached result string, or null on miss/expiry/any DB error.
function dbCacheGet(key) {
  if (!_cacheDb) return null;
  const cutoff = new Date(Date.now() - CACHE_TTL_DAYS * 864e5).toISOString();
  try {
    const row = _cacheDb.prepare(
      "SELECT result FROM llm_cache WHERE cache_key = ? AND created_at > ?"
    ).get(key, cutoff);
    if (!row) return null;
    _cacheDb.prepare("UPDATE llm_cache SET hit_count = hit_count + 1, last_hit_at = ? WHERE cache_key = ?").run(new Date().toISOString(), key);
    return row.result;
  } catch {
    return null;
  }
}
// Best-effort upsert of a cache row; DB errors are silently ignored
// because caching is an optimization, never a requirement.
function dbCacheSet(key, type, query, intent, result, provider) {
  if (!_cacheDb) return;
  try {
    const statement = _cacheDb.prepare(
      "INSERT OR REPLACE INTO llm_cache (cache_key, cache_type, query, intent, result, provider, created_at, hit_count, last_hit_at) VALUES (?, ?, ?, ?, ?, ?, ?, 0, NULL)"
    );
    statement.run(key, type, query, intent || null, result, provider, new Date().toISOString());
  } catch {
  }
}
|
|
42
|
+
// Memoized result of detectProvider(); null until detection has run.
var detectedProvider = null;
var detectionDone = false;
// Fetch a secret from 1Password via the `op` CLI, authenticating with
// the service-account token stored at ~/.openclaw/secrets/op-sa-token.
// Returns undefined when the token, CLI, or item is unavailable.
// NOTE(review): itemName/fieldLabel are interpolated into a shell
// command — callers must only pass trusted, internally-defined names.
function getOpSecret(itemName, fieldLabel) {
  try {
    const tokenFile = join(homedir(), ".openclaw/secrets/op-sa-token");
    if (!existsSync(tokenFile)) return undefined;
    const token = readFileSync(tokenFile, "utf-8").trim();
    const output = execSync(
      `OP_SERVICE_ACCOUNT_TOKEN="${token}" op item get "${itemName}" --vault "Agent Secrets" --fields "${fieldLabel}" --reveal`,
      { encoding: "utf-8", timeout: 5e3, stdio: ["pipe", "pipe", "pipe"] }
    ).trim();
    return output || undefined;
  } catch {
    // Missing `op` binary, timeout, or lookup failure — no secret.
    return undefined;
  }
}
|
|
58
|
+
// Detect which LLM backend to use, probing in priority order:
// 1. MCP sampling (when a client server is registered)
// 2. Local MLX server on :18791
// 3. Local Ollama on :11434 (skipping embedding-only models)
// 4. OpenAI API key (env or 1Password)
// 5. Anthropic API key (env or 1Password; OAuth "sk-ant-oat" tokens
//    are rejected — they don't work with the x-api-key header)
// Falls back to provider "none". The result is memoized for the
// process lifetime; the chosen provider is logged to stderr.
async function detectProvider() {
  if (detectionDone && detectedProvider) return detectedProvider;
  detectionDone = true;
  if (samplingServer) {
    detectedProvider = { provider: "sampling", baseURL: "", apiKey: "", model: "client-selected" };
    process.stderr.write("[memory-crystal] LLM provider: MCP Sampling (via client)\n");
    return detectedProvider;
  }
  // Local MLX server: 1s probe, first listed model wins.
  try {
    const resp = await fetch("http://localhost:18791/v1/models", { signal: AbortSignal.timeout(1e3) });
    if (resp.ok) {
      const data = await resp.json();
      const model = data?.data?.[0]?.id || "default";
      detectedProvider = { provider: "mlx", baseURL: "http://localhost:18791/v1", apiKey: "not-needed", model };
      process.stderr.write(`[memory-crystal] LLM provider: MLX (${model})
`);
      return detectedProvider;
    }
  } catch {
  }
  // Local Ollama: pick the first model that can chat (not embedding-only).
  try {
    const resp = await fetch("http://localhost:11434/api/tags", { signal: AbortSignal.timeout(1e3) });
    if (resp.ok) {
      const data = await resp.json();
      const models = data?.models || [];
      const embeddingOnly = ["nomic-embed-text", "mxbai-embed", "all-minilm", "snowflake-arctic-embed"];
      const chatModel = models.find((m) => !embeddingOnly.some((e) => m.name.startsWith(e)));
      if (chatModel) {
        detectedProvider = { provider: "ollama", baseURL: "http://localhost:11434/v1", apiKey: "ollama", model: chatModel.name };
        process.stderr.write(`[memory-crystal] LLM provider: Ollama (${chatModel.name})
`);
        return detectedProvider;
      }
    }
  } catch {
  }
  const openaiKey = process.env.OPENAI_API_KEY || getOpSecret("OpenAI API", "api key");
  if (openaiKey) {
    detectedProvider = { provider: "openai", baseURL: "https://api.openai.com/v1", apiKey: openaiKey, model: "gpt-4o-mini" };
    process.stderr.write("[memory-crystal] LLM provider: OpenAI API\n");
    return detectedProvider;
  }
  const anthropicKey = process.env.ANTHROPIC_API_KEY || getOpSecret("Anthropic Auth Token - remote bunkers", "Auth Token");
  if (anthropicKey && !anthropicKey.startsWith("sk-ant-oat")) {
    detectedProvider = { provider: "anthropic", baseURL: "https://api.anthropic.com", apiKey: anthropicKey, model: "claude-haiku-4-5-20251001" };
    process.stderr.write("[memory-crystal] LLM provider: Anthropic API\n");
    return detectedProvider;
  }
  detectedProvider = { provider: "none", baseURL: "", apiKey: "", model: "" };
  process.stderr.write("[memory-crystal] LLM provider: none (deep search unavailable)\n");
  return detectedProvider;
}
|
|
110
|
+
// Dispatch a chat completion to the detected provider and return the
// assistant's text (empty string when the response has none).
// MCP sampling and Anthropic use bespoke protocols; everything else
// (MLX, Ollama, OpenAI) speaks the OpenAI-compatible /chat/completions
// API. Throws on a non-OK HTTP status.
async function chatComplete(config, messages, maxTokens = 300) {
  if (config.provider === "sampling") return samplingComplete(messages, maxTokens);
  if (config.provider === "anthropic") return anthropicComplete(config, messages, maxTokens);
  const payload = {
    model: config.model,
    messages,
    max_tokens: maxTokens,
    temperature: 0.7
  };
  const resp = await fetch(`${config.baseURL}/chat/completions`, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${config.apiKey}`
    },
    body: JSON.stringify(payload)
  });
  if (!resp.ok) throw new Error(`LLM request failed: ${resp.status}`);
  const data = await resp.json();
  return data.choices?.[0]?.message?.content || "";
}
|
|
134
|
+
// Call Anthropic's native /v1/messages API and return the first text
// block (empty string when absent). The system message, if present, is
// hoisted into the top-level `system` field as the API requires.
// Throws on a non-OK HTTP status.
async function anthropicComplete(config, messages, maxTokens) {
  const systemMsg = messages.find((m) => m.role === "system");
  const body = {
    model: config.model,
    max_tokens: maxTokens,
    messages: messages.filter((m) => m.role !== "system")
  };
  if (systemMsg) body.system = systemMsg.content;
  const resp = await fetch("https://api.anthropic.com/v1/messages", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "x-api-key": config.apiKey,
      "anthropic-version": "2023-06-01"
    },
    body: JSON.stringify(body)
  });
  if (!resp.ok) throw new Error(`Anthropic request failed: ${resp.status}`);
  const data = await resp.json();
  return data.content?.[0]?.text || "";
}
|
|
156
|
+
// Ask the connected MCP client to run the completion on our behalf.
// Model preferences request a cheap, fast (Haiku-class) model — query
// expansion does not need a frontier model. Returns the text result,
// or "" when the client returns an unexpected shape.
async function samplingComplete(messages, maxTokens) {
  if (!samplingServer) throw new Error("MCP sampling server not set");
  const systemMsg = messages.find((m) => m.role === "system");
  const chatMessages = messages
    .filter((m) => m.role !== "system")
    .map((m) => ({ role: m.role, content: { type: "text", text: m.content } }));
  const result = await samplingServer.createMessage({
    messages: chatMessages,
    systemPrompt: systemMsg?.content,
    maxTokens,
    modelPreferences: {
      costPriority: 0.9,
      speedPriority: 0.8,
      intelligencePriority: 0.3,
      hints: [{ name: "haiku" }]
    }
  });
  if (result?.content?.type === "text") return result.content.text;
  if (typeof result?.content === "string") return result.content;
  return "";
}
|
|
179
|
+
var EXPAND_PROMPT = `You are a search query expander. Given a search query, generate exactly 3 variations to improve search recall.
|
|
180
|
+
|
|
181
|
+
Output exactly 3 lines in this format (no other text):
|
|
182
|
+
lex: <keyword-focused variation for full-text search>
|
|
183
|
+
vec: <semantic variation rephrased for embedding similarity>
|
|
184
|
+
hyde: <hypothetical document snippet that would answer this query>
|
|
185
|
+
|
|
186
|
+
Rules:
|
|
187
|
+
- Each variation must contain at least one term from the original query
|
|
188
|
+
- Keep variations concise (under 30 words each)
|
|
189
|
+
- lex should use specific keywords and synonyms
|
|
190
|
+
- vec should rephrase the intent naturally
|
|
191
|
+
- hyde should be a short passage as if answering the query`;
|
|
192
|
+
// Expand a search query into up to three labeled variations
// ({ type: "lex"|"vec"|"hyde", text }) using the detected LLM.
// Lookup order: persistent llm_cache → in-process expansionCache →
// live LLM call. Returns [] when no provider is available; on LLM
// failure or fully-filtered output, returns an echo-style fallback
// (which is deliberately NOT cached).
async function expandQuery(query, intent) {
  const cacheKey = intent ? `expand:${query}||${intent}` : `expand:${query}`;
  const dbCached = dbCacheGet(cacheKey);
  if (dbCached) {
    try {
      return JSON.parse(dbCached);
    } catch {
      // Corrupt cache entry — fall through and recompute.
    }
  }
  const cached = expansionCache.get(cacheKey);
  if (cached) return cached;
  const config = await detectProvider();
  if (config.provider === "none") return [];
  try {
    const intentContext = intent ? `
Query intent: ${intent}. Use this to guide your variations toward the intended domain.` : "";
    const result = await chatComplete(config, [
      { role: "system", content: EXPAND_PROMPT + intentContext },
      { role: "user", content: query }
    ], 300);
    const lines = result.trim().split("\n");
    const queryLower = query.toLowerCase();
    const queryTerms = queryLower.replace(/[^a-z0-9\s]/g, " ").split(/\s+/).filter(Boolean);
    // Guard against hallucinated expansions: each variation must share
    // at least one term with the original query.
    const hasQueryTerm = (text) => {
      const lower = text.toLowerCase();
      if (queryTerms.length === 0) return true;
      return queryTerms.some((term) => lower.includes(term));
    };
    // Parse "lex:/vec:/hyde:" lines; drop anything mislabeled, empty,
    // or failing the shared-term check.
    const variations = lines.map((line) => {
      const colonIdx = line.indexOf(":");
      if (colonIdx === -1) return null;
      const type = line.slice(0, colonIdx).trim();
      if (type !== "lex" && type !== "vec" && type !== "hyde") return null;
      const text = line.slice(colonIdx + 1).trim();
      if (!text || !hasQueryTerm(text)) return null;
      return { type, text };
    }).filter((v) => v !== null);
    if (variations.length > 0) {
      expansionCache.set(cacheKey, variations);
      dbCacheSet(cacheKey, "expansion", query, intent, JSON.stringify(variations), config.provider);
      return variations;
    }
  } catch (err) {
    process.stderr.write(`[memory-crystal] Query expansion failed: ${err.message}
`);
  }
  // Degraded mode: echo the query so downstream search still runs.
  const fallback = [
    { type: "lex", text: query },
    { type: "vec", text: query },
    { type: "hyde", text: `Information about ${query}` }
  ];
  return fallback;
}
|
|
245
|
+
var RERANK_PROMPT = `You are a search result re-ranker. Given a query and a list of text passages, rate each passage's relevance to the query.
|
|
246
|
+
|
|
247
|
+
Output one line per passage in this exact format:
|
|
248
|
+
<index>: <score>
|
|
249
|
+
|
|
250
|
+
Where index is the passage number (0-based) and score is a float from 0.0 to 1.0.
|
|
251
|
+
- 1.0 = perfectly relevant, directly answers the query
|
|
252
|
+
- 0.7 = highly relevant, closely related
|
|
253
|
+
- 0.4 = somewhat relevant, tangentially related
|
|
254
|
+
- 0.1 = barely relevant
|
|
255
|
+
- 0.0 = not relevant at all
|
|
256
|
+
|
|
257
|
+
Rate ALL passages. Output nothing else.`;
|
|
258
|
+
// Re-rank `passages` for relevance to `query` using the detected LLM.
// Returns [{ index, score }] sorted by descending score. With no
// provider, or on any failure, falls back to the original order with
// slightly decreasing synthetic scores. Successful rankings are stored
// in the persistent llm_cache keyed by query + a passage-content hash.
async function rerankResults(query, passages) {
  const config = await detectProvider();
  if (config.provider === "none") {
    return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
  }
  // Order-independent cache key: hash the sorted 200-char passage prefixes.
  const { createHash } = await import("crypto");
  const contentHash = createHash("sha256").update(passages.map((p) => p.slice(0, 200)).sort().join("|")).digest("hex").slice(0, 16);
  const rerankCacheKey = `rerank:${query}||${contentHash}`;
  const dbCachedRerank = dbCacheGet(rerankCacheKey);
  if (dbCachedRerank) {
    try {
      return JSON.parse(dbCachedRerank);
    } catch {
      // Corrupt cache entry — fall through and recompute.
    }
  }
  try {
    // Truncate each passage to 500 chars to bound prompt size.
    const passageList = passages.map((p, i) => `[${i}] ${p.slice(0, 500)}`).join("\n\n");
    const result = await chatComplete(config, [
      { role: "system", content: RERANK_PROMPT },
      { role: "user", content: `Query: ${query}

Passages:
${passageList}` }
    ], 200);
    const results = [];
    for (const line of result.trim().split("\n")) {
      const match = line.match(/^(\d+):\s*([\d.]+)/);
      if (match) {
        // Explicit radix — never rely on parseInt's default.
        results.push({ index: parseInt(match[1], 10), score: parseFloat(match[2]) });
      }
    }
    // Any passage the model skipped gets score 0 so the ordering is total.
    const scored = new Set(results.map((r) => r.index));
    for (let i = 0; i < passages.length; i++) {
      if (!scored.has(i)) results.push({ index: i, score: 0 });
    }
    const sorted = results.sort((a, b) => b.score - a.score);
    dbCacheSet(rerankCacheKey, "rerank", query, void 0, JSON.stringify(sorted), config.provider);
    return sorted;
  } catch (err) {
    process.stderr.write(`[memory-crystal] Reranking failed: ${err.message}
`);
    return passages.map((_, i) => ({ index: i, score: 1 - i * 0.01 }));
  }
}
|
|
302
|
+
|
|
303
|
+
// Public API of the LLM helper module (completion transports and the
// prompt constants stay private).
export {
  setSamplingServer,
  hasSampling,
  setLLMCacheDb,
  detectProvider,
  expandQuery,
  rerankResults
};
|