@gamaze/hicortex 0.2.1 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +77 -37
- package/dist/claude-md.d.ts +28 -0
- package/dist/claude-md.js +142 -0
- package/dist/cli.d.ts +12 -0
- package/dist/cli.js +84 -0
- package/dist/consolidate.js +1 -1
- package/dist/db.d.ts +10 -0
- package/dist/db.js +74 -0
- package/dist/index.js +10 -14
- package/dist/init.d.ts +17 -0
- package/dist/init.js +397 -0
- package/dist/llm.d.ts +20 -2
- package/dist/llm.js +82 -2
- package/dist/mcp-server.d.ts +19 -0
- package/dist/mcp-server.js +266 -0
- package/dist/nightly.d.ts +15 -0
- package/dist/nightly.js +167 -0
- package/dist/status.d.ts +4 -0
- package/dist/status.js +120 -0
- package/dist/transcript-reader.d.ts +20 -0
- package/dist/transcript-reader.js +126 -0
- package/dist/uninstall.d.ts +5 -0
- package/dist/uninstall.js +84 -0
- package/openclaw.plugin.json +1 -1
- package/package.json +17 -9
package/dist/init.js
ADDED
|
@@ -0,0 +1,397 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Hicortex init — detect existing setup and configure for CC.
|
|
4
|
+
*
|
|
5
|
+
* Detection:
|
|
6
|
+
* 1. Local HC server running (localhost:8787)
|
|
7
|
+
* 2. Remote HC server (HICORTEX_SERVER_URL or bedrock:8787)
|
|
8
|
+
* 3. OC plugin installed (~/.openclaw/openclaw.json)
|
|
9
|
+
* 4. CC MCP already registered (~/.claude/settings.json)
|
|
10
|
+
* 5. Existing DB (~/.hicortex/ or ~/.openclaw/data/)
|
|
11
|
+
*
|
|
12
|
+
* Actions:
|
|
13
|
+
* - Install persistent daemon (launchd/systemd)
|
|
14
|
+
* - Register MCP server in CC settings
|
|
15
|
+
* - Inject CLAUDE.md learnings block
|
|
16
|
+
* - Install CC custom commands (/learn, /hicortex-activate)
|
|
17
|
+
*/
|
|
18
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
19
|
+
exports.runInit = runInit;
|
|
20
|
+
const node_fs_1 = require("node:fs");
|
|
21
|
+
const node_path_1 = require("node:path");
|
|
22
|
+
const node_os_1 = require("node:os");
|
|
23
|
+
const node_child_process_1 = require("node:child_process");
|
|
24
|
+
const node_readline_1 = require("node:readline");
|
|
25
|
+
const HICORTEX_HOME = (0, node_path_1.join)((0, node_os_1.homedir)(), ".hicortex");
|
|
26
|
+
const CC_SETTINGS = (0, node_path_1.join)((0, node_os_1.homedir)(), ".claude", "settings.json");
|
|
27
|
+
const CC_COMMANDS_DIR = (0, node_path_1.join)((0, node_os_1.homedir)(), ".claude", "commands");
|
|
28
|
+
const OC_CONFIG = (0, node_path_1.join)((0, node_os_1.homedir)(), ".openclaw", "openclaw.json");
|
|
29
|
+
const DEFAULT_PORT = 8787;
|
|
30
|
+
async function detect() {
    // Probe the environment for any pre-existing Hicortex installation:
    // running servers (local, then remote), the OpenClaw plugin, an existing
    // Claude Code MCP registration, and any on-disk database.
    const found = {
        localServer: false,
        remoteServer: false,
        ocPlugin: false,
        ccMcpRegistered: false,
        existingDb: false,
    };
    // Short-timeout health probe shared by the local and remote checks.
    const health = (baseUrl) => fetch(`${baseUrl}/health`, {
        signal: AbortSignal.timeout(2000),
    });
    // 1. Local server on the default port.
    const localUrl = `http://127.0.0.1:${DEFAULT_PORT}`;
    try {
        const resp = await health(localUrl);
        if (resp.ok) {
            found.localServer = true;
            found.localServerUrl = localUrl;
            found.memoryCount = (await resp.json()).memories;
        }
    }
    catch { /* not running */ }
    // 2. Remote server from the environment (skipped when a local one answered).
    const remoteUrl = process.env.HICORTEX_SERVER_URL;
    if (remoteUrl && !found.localServer) {
        try {
            const resp = await health(remoteUrl);
            if (resp.ok) {
                found.remoteServer = true;
                found.remoteServerUrl = remoteUrl;
                found.memoryCount = (await resp.json()).memories;
            }
        }
        catch { /* not reachable */ }
    }
    // 3. OpenClaw plugin — look for any known hicortex entry in the OC config.
    try {
        const ocConfig = JSON.parse((0, node_fs_1.readFileSync)(OC_CONFIG, "utf-8"));
        const entries = ocConfig?.plugins?.entries ?? {};
        const installs = ocConfig?.plugins?.installs ?? {};
        found.ocPlugin = "hicortex" in entries || "hicortex" in installs || "hicortex-memory" in entries;
    }
    catch { /* no OC config */ }
    // 4. Claude Code MCP registration.
    try {
        const ccSettings = JSON.parse((0, node_fs_1.readFileSync)(CC_SETTINGS, "utf-8"));
        found.ccMcpRegistered = "hicortex" in (ccSettings?.mcpServers ?? {});
    }
    catch { /* no CC settings */ }
    // 5. Existing database — canonical location first, then the legacy OC path.
    const dbCandidates = [
        (0, node_path_1.join)(HICORTEX_HOME, "hicortex.db"),
        (0, node_path_1.join)((0, node_os_1.homedir)(), ".openclaw", "data", "hicortex.db"),
    ];
    for (const candidate of dbCandidates) {
        if ((0, node_fs_1.existsSync)(candidate)) {
            found.existingDb = true;
            found.dbPath = candidate;
            break;
        }
    }
    return found;
}
|
|
96
|
+
// ---------------------------------------------------------------------------
|
|
97
|
+
// Actions
|
|
98
|
+
// ---------------------------------------------------------------------------
|
|
99
|
+
/**
 * Register the Hicortex MCP endpoint in Claude Code's user settings file.
 *
 * Merges into the existing settings rather than replacing them: only the
 * `mcpServers.hicortex` entry is added/overwritten.
 *
 * @param {string} serverUrl Base URL of the running Hicortex server; the MCP
 *   client connects to `${serverUrl}/sse`.
 */
function registerCcMcp(serverUrl) {
    let settings = {};
    if ((0, node_fs_1.existsSync)(CC_SETTINGS)) {
        try {
            settings = JSON.parse((0, node_fs_1.readFileSync)(CC_SETTINGS, "utf-8"));
        }
        catch (err) {
            // The settings file exists but cannot be read/parsed. Writing a
            // fresh file here would silently destroy the user's other Claude
            // Code settings, so refuse and let them fix the file first.
            console.error(` ✗ Could not parse ${CC_SETTINGS} (${err}). Fix or remove the file and re-run init.`);
            return;
        }
    }
    if (!settings.mcpServers)
        settings.mcpServers = {};
    settings.mcpServers.hicortex = {
        type: "http",
        url: `${serverUrl}/sse`,
    };
    (0, node_fs_1.mkdirSync)((0, node_path_1.dirname)(CC_SETTINGS), { recursive: true });
    (0, node_fs_1.writeFileSync)(CC_SETTINGS, JSON.stringify(settings, null, 2));
    console.log(` ✓ Registered MCP server in ${CC_SETTINGS}`);
}
|
|
115
|
+
/**
 * Install the Hicortex custom slash-commands for Claude Code.
 *
 * Writes two markdown command definitions into the user's CC commands
 * directory (~/.claude/commands): `learn.md` (backs the /learn command) and
 * `hicortex-activate.md` (backs /hicortex-activate). Existing files with the
 * same names are overwritten, so re-running init refreshes the commands.
 */
function installCcCommands() {
    (0, node_fs_1.mkdirSync)(CC_COMMANDS_DIR, { recursive: true });
    // /learn command — instructs the agent to clean up the user's note and
    // persist it via the hicortex_ingest MCP tool.
    const learnContent = `# Save Learning to Hicortex

When invoked with \`/learn <text>\`, store the learning in long-term memory.

## Steps

1. Parse the text after \`/learn\`
2. Clean it up into a clear, self-contained statement
3. Add today's date
4. Call the \`hicortex_ingest\` tool with:
   - \`content\`: The learning text prefixed with "LEARNING: "
   - \`project\`: "global" (unless clearly project-specific)
   - \`memory_type\`: "lesson"
5. Confirm what was saved (brief, one line)
`;
    (0, node_fs_1.writeFileSync)((0, node_path_1.join)(CC_COMMANDS_DIR, "learn.md"), learnContent);
    // /hicortex-activate command — walks the agent through writing the license
    // key into ~/.hicortex/config.json (or pointing the user to the store).
    const activateContent = `# Activate Hicortex License

When the user wants to activate their license key:

## If key provided (e.g. /hicortex-activate hctx-abc123)

Write the key to the config file:

\`\`\`bash
mkdir -p ~/.hicortex
cat > ~/.hicortex/config.json << 'EOF'
{ "licenseKey": "THE_KEY_HERE" }
EOF
\`\`\`

Then tell the user: "License activated! Hicortex now has unlimited memory. Restart the server to apply: \`launchctl stop com.gamaze.hicortex && launchctl start com.gamaze.hicortex\`"

## If no key provided

Tell them: "Get a license key at https://hicortex.gamaze.com/ — after purchase, you'll receive your key by email. Then tell me the key and I'll activate it."
`;
    (0, node_fs_1.writeFileSync)((0, node_path_1.join)(CC_COMMANDS_DIR, "hicortex-activate.md"), activateContent);
    console.log(` ✓ Installed /learn and /hicortex-activate commands in ${CC_COMMANDS_DIR}`);
}
|
|
159
|
+
/**
 * Install a persistent Hicortex server daemon for the current platform.
 * Dispatches to launchd on macOS and systemd on Linux; anything else gets a
 * manual-start hint. Returns true when the daemon was installed and loaded.
 */
function installDaemon() {
    const osName = (0, node_os_1.platform)();
    const npxPath = findNpxPath();
    switch (osName) {
        case "darwin":
            return installLaunchd(npxPath);
        case "linux":
            return installSystemd(npxPath);
        default:
            console.log(` ⚠ Unsupported platform: ${osName}. Start the server manually: npx @gamaze/hicortex server`);
            return false;
    }
}
|
|
173
|
+
/**
 * Resolve the absolute path of `npx` so daemon units can invoke it without
 * depending on the (typically minimal) service-environment PATH.
 *
 * Takes only the first line of `which` output and falls back to
 * /usr/local/bin/npx when `which` is unavailable, fails, or prints nothing —
 * the previous behavior could return an empty or multi-line string, which
 * would produce a broken launchd/systemd unit.
 *
 * @returns {string} Absolute path to npx (best effort).
 */
function findNpxPath() {
    try {
        const resolved = (0, node_child_process_1.execSync)("which npx", { encoding: "utf-8" })
            .split("\n")[0]
            .trim();
        if (resolved)
            return resolved;
    }
    catch { /* `which` missing or npx not on PATH */ }
    return "/usr/local/bin/npx";
}
|
|
181
|
+
/**
 * Install (or reinstall) a launchd LaunchAgent that keeps the Hicortex server
 * running for the current macOS user.
 *
 * Fix: the plist path is now shell-quoted in the `launchctl` commands — the
 * previous unquoted interpolation broke for home directories containing
 * spaces.
 *
 * @param {string} npxPath Absolute path to npx, embedded in ProgramArguments.
 * @returns {boolean} true when the agent was written and loaded.
 */
function installLaunchd(npxPath) {
    const plistDir = (0, node_path_1.join)((0, node_os_1.homedir)(), "Library", "LaunchAgents");
    const plistPath = (0, node_path_1.join)(plistDir, "com.gamaze.hicortex.plist");
    const logPath = (0, node_path_1.join)(HICORTEX_HOME, "server.log");
    const errLogPath = (0, node_path_1.join)(HICORTEX_HOME, "server-err.log");
    // PATH is embedded so the daemon can find node even though launchd
    // starts it with a near-empty environment.
    const plist = `<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
  <key>Label</key>
  <string>com.gamaze.hicortex</string>
  <key>ProgramArguments</key>
  <array>
    <string>${npxPath}</string>
    <string>-y</string>
    <string>@gamaze/hicortex</string>
    <string>server</string>
  </array>
  <key>KeepAlive</key>
  <true/>
  <key>RunAtLoad</key>
  <true/>
  <key>StandardOutPath</key>
  <string>${logPath}</string>
  <key>StandardErrorPath</key>
  <string>${errLogPath}</string>
  <key>EnvironmentVariables</key>
  <dict>
    <key>PATH</key>
    <string>${(0, node_path_1.dirname)(npxPath)}:/usr/local/bin:/usr/bin:/bin</string>
  </dict>
</dict>
</plist>`;
    (0, node_fs_1.mkdirSync)(plistDir, { recursive: true });
    (0, node_fs_1.mkdirSync)(HICORTEX_HOME, { recursive: true });
    (0, node_fs_1.writeFileSync)(plistPath, plist);
    try {
        // Unload first if already loaded (idempotent re-install).
        try {
            (0, node_child_process_1.execSync)(`launchctl unload "${plistPath}" 2>/dev/null`);
        }
        catch { /* not loaded */ }
        (0, node_child_process_1.execSync)(`launchctl load "${plistPath}"`);
        console.log(` ✓ Installed launchd daemon: ${plistPath}`);
        return true;
    }
    catch (err) {
        console.error(` ✗ Failed to load launchd plist: ${err}`);
        return false;
    }
}
|
|
232
|
+
/**
 * Write a per-user systemd unit for the Hicortex server and enable it
 * immediately (`systemctl --user enable --now`). Returns true on success.
 *
 * @param {string} npxPath Absolute path to npx, used in ExecStart and to seed
 *   the unit's PATH so node is resolvable under systemd's minimal environment.
 */
function installSystemd(npxPath) {
    const unitDir = (0, node_path_1.join)((0, node_os_1.homedir)(), ".config", "systemd", "user");
    const servicePath = (0, node_path_1.join)(unitDir, "hicortex.service");
    const unitLines = [
        "[Unit]",
        "Description=Hicortex MCP server — long-term memory for AI agents",
        "",
        "[Service]",
        "Type=simple",
        `ExecStart=${npxPath} -y @gamaze/hicortex server`,
        "Restart=on-failure",
        "RestartSec=10",
        "StandardOutput=journal",
        "StandardError=journal",
        `Environment=PATH=${(0, node_path_1.dirname)(npxPath)}:/usr/local/bin:/usr/bin:/bin`,
        "",
        "[Install]",
        "WantedBy=default.target",
        "",
    ];
    (0, node_fs_1.mkdirSync)(unitDir, { recursive: true });
    (0, node_fs_1.writeFileSync)(servicePath, unitLines.join("\n"));
    try {
        (0, node_child_process_1.execSync)("systemctl --user daemon-reload");
        (0, node_child_process_1.execSync)("systemctl --user enable --now hicortex.service");
        console.log(` ✓ Installed systemd service: ${servicePath}`);
        return true;
    }
    catch (err) {
        console.error(` ✗ Failed to enable systemd service: ${err}`);
        return false;
    }
}
|
|
263
|
+
// ---------------------------------------------------------------------------
|
|
264
|
+
// Interactive prompt
|
|
265
|
+
// ---------------------------------------------------------------------------
|
|
266
|
+
/**
 * Print a prompt on stdout and resolve with the user's trimmed stdin reply.
 *
 * @param {string} question Prompt text shown to the user.
 * @returns {Promise<string>} The answer with surrounding whitespace removed.
 */
async function ask(question) {
    return new Promise((resolve) => {
        const rl = (0, node_readline_1.createInterface)({ input: process.stdin, output: process.stdout });
        rl.question(question, (reply) => {
            // Close the interface so the process can exit cleanly afterwards.
            rl.close();
            resolve(reply.trim());
        });
    });
}
|
|
275
|
+
// ---------------------------------------------------------------------------
|
|
276
|
+
// Main
|
|
277
|
+
// ---------------------------------------------------------------------------
|
|
278
|
+
/**
 * Entry point for `hicortex init`: detect the existing setup, summarize the
 * planned actions, ask for confirmation, then configure Claude Code.
 *
 * Phases:
 *  1. Detect — running servers, OC plugin, CC MCP registration, existing DB.
 *  2. Report — print findings and pick the server URL to register.
 *  3. Execute — install the daemon (if no server is reachable), register the
 *     MCP server, install slash commands, and inject the CLAUDE.md block.
 */
async function runInit() {
    console.log("Hicortex — Setup for Claude Code\n");
    // Phase 1: Detect
    console.log("Detecting existing setup...\n");
    const d = await detect();
    // Phase 2: Report what was found.
    console.log("Found:");
    if (d.localServer)
        console.log(` • Local server running at ${d.localServerUrl} (${d.memoryCount ?? "?"} memories)`);
    if (d.remoteServer)
        console.log(` • Remote server at ${d.remoteServerUrl} (${d.memoryCount ?? "?"} memories)`);
    if (d.ocPlugin)
        console.log(" • OpenClaw plugin installed");
    if (d.ccMcpRegistered)
        console.log(" • CC MCP already registered");
    if (d.existingDb)
        console.log(` • Database at ${d.dbPath}`);
    if (!d.localServer && !d.remoteServer && !d.ocPlugin && !d.existingDb) {
        console.log(" • Fresh install (no existing Hicortex found)");
    }
    console.log();
    // Determine the server URL to register: prefer a live local server, then
    // a live remote one, otherwise the default local address (a daemon will
    // be installed below to serve it).
    let serverUrl;
    if (d.localServer) {
        serverUrl = d.localServerUrl;
        console.log(`Using existing local server at ${serverUrl}`);
    }
    else if (d.remoteServer) {
        serverUrl = d.remoteServerUrl;
        console.log(`Using remote server at ${serverUrl}`);
    }
    else {
        serverUrl = `http://127.0.0.1:${DEFAULT_PORT}`;
        console.log("No running server found. Will install a local daemon.");
    }
    console.log();
    // Phase 2.5: Build and show the action list before touching anything.
    const actions = [];
    if (!d.localServer && !d.remoteServer)
        actions.push("Install Hicortex server daemon");
    if (!d.ccMcpRegistered)
        actions.push("Register MCP server in CC settings");
    actions.push("Install /learn and /hicortex-activate commands");
    actions.push("Add Hicortex Learnings block to CLAUDE.md");
    // NOTE(review): this branch is unreachable — the two unconditional
    // pushes above guarantee `actions` is never empty.
    if (actions.length === 0) {
        console.log("Everything is already configured. Nothing to do.");
        return;
    }
    console.log("Actions:");
    actions.forEach((a) => console.log(` - ${a}`));
    console.log();
    // Default is yes: anything other than an exact "n"/"N" proceeds.
    const answer = await ask("Continue? [Y/n] ");
    if (answer.toLowerCase() === "n") {
        console.log("Cancelled.");
        return;
    }
    console.log();
    // Phase 3: Execute
    // Install daemon if needed (only when no server answered the probe).
    if (!d.localServer && !d.remoteServer) {
        installDaemon();
        // Give the daemon a moment to start before verifying.
        console.log(" ⏳ Waiting for server to start...");
        await new Promise((r) => setTimeout(r, 5000));
        // Verify via the health endpoint; failures here are advisory only —
        // setup continues regardless.
        try {
            const resp = await fetch(`${serverUrl}/health`, { signal: AbortSignal.timeout(3000) });
            if (resp.ok) {
                console.log(" ✓ Server is running");
            }
            else {
                console.log(" ⚠ Server started but health check returned non-200. Check logs at ~/.hicortex/server.log");
            }
        }
        catch {
            console.log(" ⚠ Server may still be starting. Check: curl http://127.0.0.1:8787/health");
        }
    }
    // Register MCP (skipped when detect() saw an existing registration).
    if (!d.ccMcpRegistered) {
        registerCcMcp(serverUrl);
    }
    // Install CC commands (always refreshed — overwrites existing files).
    installCcCommands();
    // Inject CLAUDE.md (only if server has a DB we can read)
    // For now, just create the block with agent guidance and no lessons
    // Lessons will populate on first nightly run
    const claudeMdPath = (0, node_path_1.join)((0, node_os_1.homedir)(), ".claude", "CLAUDE.md");
    // The HICORTEX-LEARNINGS marker makes this idempotent: skip injection
    // when a previous run already added the block.
    if (!(0, node_fs_1.existsSync)(claudeMdPath) || !(0, node_fs_1.readFileSync)(claudeMdPath, "utf-8").includes("HICORTEX-LEARNINGS")) {
        (0, node_fs_1.mkdirSync)((0, node_path_1.dirname)(claudeMdPath), { recursive: true });
        let content = "";
        try {
            content = (0, node_fs_1.readFileSync)(claudeMdPath, "utf-8");
        }
        catch { /* new file */ }
        const block = [
            "<!-- HICORTEX-LEARNINGS:START -->",
            "## Hicortex Learnings",
            "",
            "You have access to long-term memory via Hicortex MCP tools. Use `hicortex_search` when you need context from past sessions, decisions, or prior work. Use `hicortex_context` at session start to recall recent project state. Use `hicortex_ingest` to save important decisions or learnings. Sessions are auto-captured nightly.",
            "<!-- HICORTEX-LEARNINGS:END -->",
        ].join("\n");
        // Ensure existing content ends with a newline, then a blank separator
        // line, before appending the block.
        if (content.length > 0 && !content.endsWith("\n"))
            content += "\n";
        if (content.length > 0)
            content += "\n";
        content += block + "\n";
        (0, node_fs_1.writeFileSync)(claudeMdPath, content);
        console.log(` ✓ Added Hicortex Learnings block to ${claudeMdPath}`);
    }
    else {
        console.log(` ✓ CLAUDE.md already has Hicortex Learnings block`);
    }
    console.log("\n✓ Hicortex setup complete!\n");
    console.log("Next steps:");
    console.log(" 1. Restart Claude Code to pick up the new MCP server");
    console.log(" 2. Ask your agent: 'What Hicortex tools do you have?'");
    console.log(" 3. Try /learn to save something to long-term memory");
    console.log(` 4. Check server: curl ${serverUrl}/health`);
}
|
package/dist/llm.d.ts
CHANGED
|
@@ -2,13 +2,20 @@
|
|
|
2
2
|
* Multi-provider LLM client for consolidation and distillation.
|
|
3
3
|
* Ported from hicortex/consolidate/llm.py.
|
|
4
4
|
*
|
|
5
|
-
* Resolution
|
|
5
|
+
* Resolution for OC adapter (resolveLlmConfig):
|
|
6
6
|
* 1. Plugin config (llmBaseUrl, llmApiKey, llmModel)
|
|
7
7
|
* 2. ~/.openclaw/openclaw.json agents.defaults.model.primary
|
|
8
8
|
* 3. Environment vars: OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY
|
|
9
9
|
* 4. Fallback: Ollama at http://localhost:11434
|
|
10
10
|
*
|
|
11
|
-
*
|
|
11
|
+
* Resolution for CC adapter (resolveLlmConfigForCC):
|
|
12
|
+
* 1. Explicit env vars (HICORTEX_LLM_BASE_URL + HICORTEX_LLM_API_KEY + HICORTEX_LLM_MODEL)
|
|
13
|
+
* 2. ANTHROPIC_API_KEY → Haiku (cheap, CC users always have this)
|
|
14
|
+
* 3. OPENAI_API_KEY → gpt-4o-mini
|
|
15
|
+
* 4. GOOGLE_API_KEY → gemini-2.0-flash
|
|
16
|
+
* 5. Fallback: Ollama at http://localhost:11434
|
|
17
|
+
*
|
|
18
|
+
* Supports: OpenAI, Anthropic, Google, OpenRouter, Ollama, z.ai, and 15+ more
|
|
12
19
|
*/
|
|
13
20
|
export interface LlmConfig {
|
|
14
21
|
baseUrl: string;
|
|
@@ -26,6 +33,17 @@ export declare function resolveLlmConfig(pluginConfig?: {
|
|
|
26
33
|
llmModel?: string;
|
|
27
34
|
reflectModel?: string;
|
|
28
35
|
}): LlmConfig;
|
|
36
|
+
/**
|
|
37
|
+
* Resolve LLM configuration for Claude Code (no OC config file).
|
|
38
|
+
* Uses env vars only — CC users always have ANTHROPIC_API_KEY.
|
|
39
|
+
* Defaults to Haiku for distillation/scoring (~$0.50/mo).
|
|
40
|
+
*/
|
|
41
|
+
export declare function resolveLlmConfigForCC(overrides?: {
|
|
42
|
+
llmBaseUrl?: string;
|
|
43
|
+
llmApiKey?: string;
|
|
44
|
+
llmModel?: string;
|
|
45
|
+
reflectModel?: string;
|
|
46
|
+
}): LlmConfig;
|
|
29
47
|
export declare class RateLimitError extends Error {
|
|
30
48
|
retryAfterMs: number;
|
|
31
49
|
constructor(retryAfterMs: number);
|
package/dist/llm.js
CHANGED
|
@@ -3,17 +3,25 @@
|
|
|
3
3
|
* Multi-provider LLM client for consolidation and distillation.
|
|
4
4
|
* Ported from hicortex/consolidate/llm.py.
|
|
5
5
|
*
|
|
6
|
-
* Resolution
|
|
6
|
+
* Resolution for OC adapter (resolveLlmConfig):
|
|
7
7
|
* 1. Plugin config (llmBaseUrl, llmApiKey, llmModel)
|
|
8
8
|
* 2. ~/.openclaw/openclaw.json agents.defaults.model.primary
|
|
9
9
|
* 3. Environment vars: OPENAI_API_KEY, ANTHROPIC_API_KEY, GOOGLE_API_KEY
|
|
10
10
|
* 4. Fallback: Ollama at http://localhost:11434
|
|
11
11
|
*
|
|
12
|
-
*
|
|
12
|
+
* Resolution for CC adapter (resolveLlmConfigForCC):
|
|
13
|
+
* 1. Explicit env vars (HICORTEX_LLM_BASE_URL + HICORTEX_LLM_API_KEY + HICORTEX_LLM_MODEL)
|
|
14
|
+
* 2. ANTHROPIC_API_KEY → Haiku (cheap, CC users always have this)
|
|
15
|
+
* 3. OPENAI_API_KEY → gpt-4o-mini
|
|
16
|
+
* 4. GOOGLE_API_KEY → gemini-2.0-flash
|
|
17
|
+
* 5. Fallback: Ollama at http://localhost:11434
|
|
18
|
+
*
|
|
19
|
+
* Supports: OpenAI, Anthropic, Google, OpenRouter, Ollama, z.ai, and 15+ more
|
|
13
20
|
*/
|
|
14
21
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
22
|
exports.LlmClient = exports.RateLimitError = void 0;
|
|
16
23
|
exports.resolveLlmConfig = resolveLlmConfig;
|
|
24
|
+
exports.resolveLlmConfigForCC = resolveLlmConfigForCC;
|
|
17
25
|
const node_fs_1 = require("node:fs");
|
|
18
26
|
const node_path_1 = require("node:path");
|
|
19
27
|
const node_os_1 = require("node:os");
|
|
@@ -51,6 +59,78 @@ function resolveLlmConfig(pluginConfig) {
|
|
|
51
59
|
provider: "ollama",
|
|
52
60
|
};
|
|
53
61
|
}
|
|
62
|
+
/**
 * Resolve LLM configuration for Claude Code (no OC config file).
 * Uses env vars only — CC users always have ANTHROPIC_API_KEY.
 * Defaults to Haiku for distillation/scoring (~$0.50/mo).
 *
 * Resolution order: explicit overrides → HICORTEX_LLM_* env vars →
 * ANTHROPIC_API_KEY → OPENAI_API_KEY → GOOGLE_API_KEY → local Ollama.
 */
function resolveLlmConfigForCC(overrides) {
    const DEFAULT_MODEL = "claude-haiku-4-5-20251001";
    const DEFAULT_REFLECT_MODEL = "claude-sonnet-4-5-20250514";
    // 1. Explicit overrides (from config file or CLI args) — both base URL
    //    and API key must be supplied for this path to apply.
    const explicitBase = overrides?.llmBaseUrl;
    const explicitKey = overrides?.llmApiKey;
    if (explicitBase && explicitKey) {
        return {
            baseUrl: explicitBase,
            apiKey: explicitKey,
            model: overrides.llmModel ?? DEFAULT_MODEL,
            reflectModel: overrides.reflectModel ?? overrides.llmModel ?? DEFAULT_REFLECT_MODEL,
            provider: detectProvider(explicitBase),
        };
    }
    // 2. Hicortex-specific env vars.
    const hcBaseUrl = process.env.HICORTEX_LLM_BASE_URL;
    const hcApiKey = process.env.HICORTEX_LLM_API_KEY;
    const hcModel = process.env.HICORTEX_LLM_MODEL;
    if (hcBaseUrl && hcApiKey) {
        return {
            baseUrl: hcBaseUrl,
            apiKey: hcApiKey,
            model: hcModel ?? DEFAULT_MODEL,
            reflectModel: process.env.HICORTEX_REFLECT_MODEL ?? hcModel ?? DEFAULT_REFLECT_MODEL,
            provider: detectProvider(hcBaseUrl),
        };
    }
    // 3. Standard provider keys, in preference order (CC users almost always
    //    have ANTHROPIC_API_KEY).
    const anthropicKey = process.env.ANTHROPIC_API_KEY;
    if (anthropicKey) {
        return {
            baseUrl: process.env.ANTHROPIC_BASE_URL ?? "https://api.anthropic.com",
            apiKey: anthropicKey,
            model: DEFAULT_MODEL,
            reflectModel: DEFAULT_REFLECT_MODEL,
            provider: "anthropic",
        };
    }
    const openaiKey = process.env.OPENAI_API_KEY;
    if (openaiKey) {
        const openaiBase = process.env.OPENAI_BASE_URL ?? "https://api.openai.com";
        return {
            baseUrl: openaiBase,
            apiKey: openaiKey,
            model: "gpt-4o-mini",
            reflectModel: "gpt-4o-mini",
            provider: detectProvider(openaiBase),
        };
    }
    const googleKey = process.env.GOOGLE_API_KEY;
    if (googleKey) {
        return {
            baseUrl: "https://generativelanguage.googleapis.com/v1beta",
            apiKey: googleKey,
            model: "gemini-2.0-flash",
            reflectModel: "gemini-2.0-flash",
            provider: "google",
        };
    }
    // 4. Last resort: a local Ollama instance (no API key required).
    return {
        baseUrl: "http://localhost:11434",
        apiKey: "",
        model: "qwen3.5:4b",
        reflectModel: "qwen3.5:4b",
        provider: "ollama",
    };
}
|
|
54
134
|
function detectProvider(url) {
|
|
55
135
|
const u = url.toLowerCase();
|
|
56
136
|
if (u.includes("ollama") || u.includes(":11434"))
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Hicortex MCP HTTP/SSE Server.
|
|
3
|
+
*
|
|
4
|
+
* Persistent HTTP server that exposes Hicortex tools via MCP protocol.
|
|
5
|
+
* Shared across all CC sessions (and future Codex/Gemini adapters).
|
|
6
|
+
* One process, one DB connection, one embedder — no per-session overhead.
|
|
7
|
+
*
|
|
8
|
+
* Endpoints:
|
|
9
|
+
* GET /health — health check
|
|
10
|
+
* GET /sse — SSE stream for MCP clients
|
|
11
|
+
* POST /messages — message endpoint for MCP clients
|
|
12
|
+
*/
|
|
13
|
+
export declare function startServer(options?: {
|
|
14
|
+
port?: number;
|
|
15
|
+
host?: string;
|
|
16
|
+
consolidateHour?: number;
|
|
17
|
+
dbPath?: string;
|
|
18
|
+
licenseKey?: string;
|
|
19
|
+
}): Promise<void>;
|